{
"source": "jonahhw/orbitx",
"score": 3
}
#### File: orbitx/graphics/science_mod.py
```python
from typing import Optional
import vpython
from orbitx.physics import calc
from orbitx.data_structures import Entity, PhysicsState
from orbitx.graphics.threedeeobj import ThreeDeeObj
class ScienceModule(ThreeDeeObj):
    SHININESS = 0.3
    def _create_obj(
            self, entity: Entity, origin: Entity,
            texture: Optional[str]) -> vpython.compound:
main_body = vpython.box()
side_panels = vpython.box(
height=2, width=0.5, length=0.6)
obj = vpython.compound(
[main_body, side_panels], make_trail=True,
texture=texture, bumpmap=vpython.textures.gravel)
obj.pos = entity.screen_pos(origin)
obj.axis = calc.angle_to_vpy(entity.heading)
obj.length = entity.r * 2
obj.height = entity.r * 2
obj.width = entity.r * 2
# A compound object doesn't actually have a radius, but we need to
# monkey-patch this for when we recentre the camera, to determine the
# relevant_range of the space station
obj.radius = entity.r
return obj
def _label_text(self, entity: Entity, state: PhysicsState) -> str:
return entity.name
```
{
"source": "jonahjon/canary-mesh",
"score": 3
}
#### File: app/app/main_common.py
```python
import requests  # used to make outbound requests, e.g. URL GETs
from psutil import virtual_memory  # used to get memory statistics
from psutil import cpu_count  # used to get CPU counts
import os  # used to get environment variables
import hashlib
import json
import logging
LOGGER = logging.getLogger(__name__)
class MainCommon:
"""
Collection of helper functions to support the Basic ECS main app.
"""
def __init__(self, app_name):
"""
On initialization, pass in the app name that makes this unique, and get environment info.
In AWS ECS environments, the task metadata URL is dynamic. Example:
ECS_CONTAINER_METADATA_URI="http://169.254.170.2/v3/5793e693-8833-4c53-a936-bcf40cff5f0a"
https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-metadata-endpoint-v3.html
:param app_name:
"""
self.app_name = app_name
self.metadata_url = os.environ.get('ECS_CONTAINER_METADATA_URI', '')
LOGGER.info("Setting app name: [{}]".format(self.app_name))
LOGGER.info("Setting container metadata URL: [{}]".format(self.metadata_url))
def _convert_id_to_hex_color(self, instance_id):
"""
Convert instance ID (or really any string) to hex
:param instance_id:
:return:
"""
if not instance_id:
instance_id = self.app_name
m = hashlib.md5()
m.update(instance_id.encode('utf-8'))
hex_color = m.hexdigest()[:6]
LOGGER.info("Converted instance ID [{}] to hex color #[{}]".format(instance_id, hex_color))
return hex_color
@staticmethod
def _get_instance_stats():
"""
Get information from the aws metadata service.
:return: instance_id, instance_type
"""
try:
instance_id = requests.get('http://169.254.169.254/latest/meta-data/instance-id/', timeout=2).text
instance_type = requests.get('http://169.254.169.254/latest/meta-data/instance-type/', timeout=2).text
except Exception as e:
LOGGER.error("Error calling task instance metadata. Exception: {}".format(e))
instance_id = None
instance_type = None
return instance_id, instance_type
def _get_container_stats(self):
"""
This gets container metadata, if the app is running from AWS ECS. Example response:
{
"DockerId": "bc4189f761edbddec81ef75b50baebd2991827a8b2178956345cea72afec5fe9",
"Name": "service1",
"DockerName": "ecs-service1-46-service1-e2a2e5b9eeead1d75900",
"Image": "1234567890.dkr.ecr.us-west-2.amazonaws.com/demo-app:92d505",
"ImageID": "sha256:43c2cffe831ca58807cb962b5cd112faefe2e0273e2b5c4bef0c7b1193d76a53",
"Ports": [
{
"ContainerPort": 80,
"Protocol": "tcp"
}
],
"Labels": {
"com.amazonaws.ecs.cluster": "mycluster",
"com.amazonaws.ecs.container-name": "service1",
"com.amazonaws.ecs.task-arn": "arn:aws:ecs:us-west-2:1234567890:task/321ce4e1-9df0-4fcf-8352-c8221892ecac",
"com.amazonaws.ecs.task-definition-family": "service1",
"com.amazonaws.ecs.task-definition-version": "46"
},
"DesiredStatus": "RUNNING",
"KnownStatus": "RUNNING",
"Limits": {
"CPU": 10,
"Memory": 512
},
"CreatedAt": "2019-07-01T19:13:40.755779226Z",
"StartedAt": "2019-07-01T19:13:42.9792103Z",
"Type": "NORMAL",
"Networks": [
{
"NetworkMode": "bridge",
"IPv4Addresses": [
"172.17.0.10"
]
}
],
"Volumes": [
{
"Source": "/var/lib/ecs/data/metadata/3212e4e1-9df0-4fcf-8352-c8221895ecac/service1",
"Destination": "/opt/ecs/metadata/bcc7af6b-94ce-411a-b726-3e26df28ad48"
}
]
}
"""
metadata = {}
metadata['debug'] = {}
metadata['debug']['metadata_url'] = self.metadata_url
        if self.metadata_url:
            try:
                task_metadata_raw = requests.get(self.metadata_url, timeout=2).text
                LOGGER.debug('Metadata Raw:')
                LOGGER.debug(json.dumps(task_metadata_raw))
                task_metadata = json.loads(task_metadata_raw)
                metadata['status'] = True
                # Build up a list of interesting metadata to return. This must
                # stay inside the try block so a failed request doesn't leave
                # task_metadata undefined.
                metadata["ecs_task_name"] = task_metadata.get("Name", "")
                metadata["ecr_image"] = task_metadata.get("Image", "")
                limits = task_metadata.get("Limits", {})
                metadata["task_cpu_limit"] = limits.get("CPU", "")
                metadata["task_mem_limit"] = limits.get("Memory", "")
                labels = task_metadata.get("Labels", {})
                metadata["cluster"] = labels.get("com.amazonaws.ecs.cluster", "")
                metadata["container_name"] = labels.get("com.amazonaws.ecs.container-name", "")
            except Exception as e:
                LOGGER.error("Error calling task metadata URL [{}]. Exception: {}".format(self.metadata_url, e))
                metadata['debug']['exception'] = str(e)
                metadata['status'] = False
        else:
            LOGGER.error("Did not find metadata URL in the environment. Try running `echo $ECS_CONTAINER_METADATA_URI`")
        return metadata
@staticmethod
def _get_version():
"""
        This assumes a build system is updating the version.txt inside the app directory with a current value.
:return: The contents of version.txt within the app root directory.
"""
        try:
            with open(os.path.join('version.txt')) as version_file:
                version = version_file.read().strip()
            LOGGER.info("Read version [{}] from version.txt".format(version))
        except Exception as e:
            LOGGER.error("Error reading version from file version.txt: {}".format(e))
            version = "UNKNOWN"
return version
def get_info(self):
message = '<p>Hi, my name is <b>' + self.app_name + '</b>, version: <b>' + self._get_version() + '</b></p>'
# AWS Instance Info
instance_id, instance_type = self._get_instance_stats()
if instance_id:
message += '<p><b>I appear to be running on Amazon Web Services.</b></p>'
message += '<div id=instance_stats>The instance I am running on is:<br />'
message += ' Instance ID: <b>' + instance_id + '</b><br />'
message += ' Instance Type: <b>' + instance_type + '</b><br />'
message += '</div>'
else:
message += '<p>I cannot seem to hit the AWS metadata service. Perhaps I am not running on AWS?</p>'
# AWS ECS Container Info
task_metadata = self._get_container_stats()
LOGGER.info('Task Metadata')
LOGGER.info(task_metadata)
if task_metadata.get('status', False):
message += '<div id=instance_stats>The ECS container I am running on is:<br />'
message += ' Task Name: <b>' + task_metadata.get("ecs_task_name", "Not found") + '</b><br />'
message += ' ECR Image: <b>' + task_metadata.get("ecr_image", "Not found") + '</b><br />'
message += ' Task CPU Limit: <b>' + str(task_metadata.get("task_cpu_limit", "Not found")) + '</b><br />'
message += ' Task Memory Limit: <b>' + str(task_metadata.get("task_mem_limit", "Not found")) + '</b><br />'
message += ' ECS Cluster: <b>' + task_metadata.get("cluster", "Not found") + '</b><br />'
message += ' Container Name: <b>' + task_metadata.get("container_name", "Not found") + '</b><br />'
message += '</div>'
else:
message += '<p>I cannot seem to hit the ECS metadata service. Perhaps I am not running on AWS ECS?</p>'
bg_color = self._convert_id_to_hex_color(instance_id)
# HTML headers for the response.
response = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset=\"UTF-8\">
<title>Where am I running?</title>
<style>
"""
response += "body {background-color: #" + bg_color + "; font: 1em \"Open Sans\", sans-serif;}"
response += """
</style>
</head>
<body>
"""
response += message
# Memory and CPUs
mem = virtual_memory()
        memmegs = int(mem.total / (1024 * 1024))
vcpu = cpu_count()
pcpu = cpu_count(logical=False)
response += '<div id=resources>I have access to <b>'+str(memmegs)+' MB of memory.</b><br/>'
response += 'I have access to <b>'+str(vcpu)+' virtual CPUs.</b><br/>'
response += 'I have access to <b>'+str(pcpu)+' physical CPUs.</b><br/>'
response += '</div>'
# close the HTML and return it (END OF INFO service)
response += "</body>"
response += "</html>"
return response
@staticmethod
def get_route_frontend_file(app, path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = os.path.join(app.static_folder, path)
if os.path.isfile(file_path):
return file_path
# ...or should be handled by the SPA's "router" in front end
else:
return None
```
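For context, here is a minimal sketch of how this helper class might be wired into a Flask app. The module path, route, and port are assumptions for illustration, not part of the repository shown above.
```python
# Hypothetical wiring (module path assumed): serve MainCommon.get_info()
# from a minimal Flask app.
from flask import Flask
from app.main_common import MainCommon

app = Flask(__name__)
common = MainCommon(app_name="demo-app")

@app.route("/")
def info():
    # Returns the HTML page describing where the container is running
    return common.get_info()

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=80)
```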
#### File: canary/lib/asg.py
```python
import boto3
import inspect
#asg_client = boto3.client('autoscaling', region_name="us-west-2")
class ASG(object):
def __init__(self, logger, **kwargs):
self.logger = logger
        if kwargs:
            if kwargs.get('credentials') is not None:
                logger.debug("Setting up {} BOTO3 Client with ASSUMED ROLE credentials".format(self.__class__.__name__))
                cred = kwargs.get('credentials')
                self.asg_client = boto3.client('autoscaling',
                                               aws_access_key_id=cred.get('AccessKeyId'),
                                               aws_secret_access_key=cred.get('SecretAccessKey'),
                                               aws_session_token=cred.get('SessionToken')
                                               )
            elif kwargs.get('region') is not None:
                logger.debug("Setting up {} BOTO3 Client with default credentials in region {}".format(self.__class__.__name__, kwargs.get('region')))
                self.asg_client = boto3.client('autoscaling', region_name=kwargs.get('region'))
            else:
                logger.debug("Setting up {} BOTO3 Client with default credentials".format(self.__class__.__name__))
                self.asg_client = boto3.client('autoscaling', region_name='us-west-2')
        else:
            logger.debug("Setting up {} BOTO3 Client with default credentials".format(self.__class__.__name__))
            self.asg_client = boto3.client('autoscaling')
def error_message(self, stack_trace, e):
message = {'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__,
'METHOD': stack_trace, 'EXCEPTION': str(e)}
return message
def suspend_asg_except_launch(self, AutoScalingGroupName):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
response = self.asg_client.suspend_processes(
AutoScalingGroupName=AutoScalingGroupName,
# Here we have to specify pausing all actions besides Launching
# This allows us to pause scaling actions, and manually scale to double the size to handle B/G
ScalingProcesses=[
'Terminate',
'HealthCheck',
'ReplaceUnhealthy',
'AZRebalance',
'AlarmNotification',
'ScheduledActions',
'AddToLoadBalancer'
]
)
return response
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
def resume_processes(self, AutoScalingGroupName):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
response = self.asg_client.resume_processes(
AutoScalingGroupName=AutoScalingGroupName,
                # Resume the same processes that were suspended above,
                # restoring normal scaling once the blue/green switch is done
ScalingProcesses=[
'Terminate',
'HealthCheck',
'ReplaceUnhealthy',
'AZRebalance',
'AlarmNotification',
'ScheduledActions',
'AddToLoadBalancer'
]
)
return response
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
def describe_auto_scaling_group_size(self, AutoScalingGroupName):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
response = self.asg_client.describe_auto_scaling_groups(
AutoScalingGroupNames=[
AutoScalingGroupName,
]
)
return response
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
def update_asg(self, AutoScalingGroupName, MinSize, MaxSize, DesiredCapacity):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
response = self.asg_client.update_auto_scaling_group(
AutoScalingGroupName=AutoScalingGroupName,
MinSize=MinSize,
MaxSize=MaxSize,
DesiredCapacity=DesiredCapacity
)
return response
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
def get_paginator(self, name):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
paginator = self.asg_client.get_paginator(name)
return paginator
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
def get_name_by_tag(self, TagKey, TagValue):
method = inspect.stack()[0][3]
self.logger.info('Executing function {}'.format(method))
try:
paginator = self.asg_client.get_paginator('describe_auto_scaling_groups')
page_iterator = paginator.paginate(
PaginationConfig={'PageSize': 100}
)
filtered_asgs = page_iterator.search(
'AutoScalingGroups[] | [?contains(Tags[?Key==`{}`].Value, `{}`)]'.format(
TagKey, TagValue)
)
            for asg in filtered_asgs:
                self.logger.info("Found group with matching tags: {}".format(asg['AutoScalingGroupName']))
                # Return the first matching group's name
                return asg['AutoScalingGroupName']
except Exception as e:
self.logger.exception(self.error_message(method, e))
raise
```
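The suspend/double/resume sequence the comments above describe might be driven like this. A minimal sketch; the tag key/value and group sizing are assumptions for illustration.
```python
# Sketch of the blue/green flow described in the suspend/resume comments.
# The tag key/value here are hypothetical.
import logging

logger = logging.getLogger("canary")
asg = ASG(logger, region="us-west-2")

group = asg.get_name_by_tag("canary", "enabled")
asg.suspend_asg_except_launch(group)  # pause everything except Launch
desc = asg.describe_auto_scaling_group_size(group)["AutoScalingGroups"][0]
asg.update_asg(group,
               MinSize=desc["MinSize"],
               MaxSize=desc["MaxSize"] * 2,
               DesiredCapacity=desc["DesiredCapacity"] * 2)  # double for B/G
# ... shift traffic and verify the new instances here ...
asg.resume_processes(group)  # restore normal scaling behaviour
```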
#### File: canary/lib/cwlogs.py
```python
import boto3
import inspect
import time
from botocore.exceptions import ClientError, ParamValidationError
#log_client = boto3.client('logs', region_name="us-west-2")
class Logs(object):
def __init__(self, **kwargs):
        if kwargs:
            if kwargs.get('credentials') is not None:
                print("Setting up {} BOTO3 Client with ASSUMED ROLE credentials".format(self.__class__.__name__))
                cred = kwargs.get('credentials')
                self.log_client = boto3.client('logs',
                                               aws_access_key_id=cred.get('AccessKeyId'),
                                               aws_secret_access_key=cred.get('SecretAccessKey'),
                                               aws_session_token=cred.get('SessionToken')
                                               )
            elif kwargs.get('region') is not None:
                print("Setting up {} BOTO3 Client with default credentials in region {}".format(self.__class__.__name__, kwargs.get('region')))
                self.log_client = boto3.client('logs', region_name=kwargs.get('region'))
            else:
                print("Setting up {} BOTO3 Client with default credentials".format(self.__class__.__name__))
                self.log_client = boto3.client('logs', region_name='us-west-2')
        else:
            print("Setting up {} BOTO3 Client with default credentials".format(self.__class__.__name__))
            self.log_client = boto3.client('logs')
def error_message(self, stack_trace, e):
message = {'FILE': __file__.split('/')[-1], 'CLASS': self.__class__.__name__,
'METHOD': stack_trace, 'EXCEPTION': str(e)}
return message
def create_log_group(self, logGroupName):
method = inspect.stack()[0][3]
print(f"Executing function {method}")
try:
response = self.log_client.create_log_group(
logGroupName=logGroupName
)
return response
except self.log_client.exceptions.ResourceAlreadyExistsException:
print(f"already have log group using..... {logGroupName}")
except Exception as e:
error = self.error_message(method, e)
print(f"{error}")
raise
def create_log_stream(self, logGroupName, logStreamName):
method = inspect.stack()[0][3]
print(f"Executing function {method}")
try:
response = self.log_client.create_log_stream(
logGroupName=logGroupName,
logStreamName=logStreamName
)
return response
except self.log_client.exceptions.ResourceAlreadyExistsException:
print(f"already have log stream using .... {logStreamName}")
except Exception as e:
error = self.error_message(method, e)
print(f"{error}")
raise
def describe_log_group(self, logGroupName):
method = inspect.stack()[0][3]
print(f"Executing function {method}")
try:
response = self.log_client.describe_log_groups(
logGroupNamePrefix=logGroupName
)
try:
logGroup = response['logGroups'][0]['logGroupName']
return logGroup
except Exception as e:
return None
except Exception as e:
error = self.error_message("describe_log_group", e)
print(f"{error}")
raise
def describe_log_streams(self, logGroupName, logStreamName):
method = inspect.stack()[0][3]
print(f"Executing function {method}")
try:
response = self.log_client.describe_log_streams(
logGroupName=logGroupName,
logStreamNamePrefix=logStreamName
)
return response
except Exception as e:
error = self.error_message("describe_log_streams", e)
print(f"{error}")
raise
def describe_log_stream_sequence_token(self, logGroupName, logStreamName):
try:
response = self.log_client.describe_log_streams(
logGroupName=logGroupName,
logStreamNamePrefix=logStreamName
)
try:
uploadSequenceToken = response['logStreams'][0]['uploadSequenceToken']
return uploadSequenceToken
except Exception as e:
return None
except Exception as e:
error = self.error_message("describe_log_stream_sequence_token", e)
print(f"{error}")
raise
def put_log_events(self, logGroupName, logStreamName, logEvents, **kwargs):
# Converts time in seconds to milliseconds for boto3
timestamp = int(round(time.time() * 1000))
try:
uploadSequenceToken = self.describe_log_stream_sequence_token(logGroupName=logGroupName, logStreamName=logStreamName)
if uploadSequenceToken is not None:
response = self.log_client.put_log_events(logGroupName=logGroupName, logStreamName=logStreamName, sequenceToken=uploadSequenceToken,
logEvents=[
{
'timestamp': timestamp,
'message': logEvents
}
]
)
return response
elif kwargs['token_pass']:
response = self.log_client.put_log_events(logGroupName=logGroupName, logStreamName=logStreamName,
logEvents=[
{
'timestamp': timestamp,
'message': logEvents
}
]
)
else:
pass
except KeyError as e:
return
except self.log_client.exceptions.ResourceNotFoundException as e:
return
except Exception as e:
error = self.error_message("put_logs", e)
print(f"{error}")
raise
```
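A short sketch of the intended call order for this wrapper; the group and stream names are assumptions.
```python
# Hypothetical usage: create group and stream (both tolerate already-exists),
# then write one event. token_pass=True lets the first put proceed before a
# sequence token exists.
logs = Logs(region="us-west-2")
logs.create_log_group("/canary/demo")
logs.create_log_stream("/canary/demo", "run-1")
logs.put_log_events("/canary/demo", "run-1", "canary started", token_pass=True)
```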
#### File: canary/lib/fake_init.py
```python
import sys
import types
import re
valid_names = re.compile(r'^[a-zA-Z0-9]+$')
class BaseAWSObject(object):
    def __init__(self, title=None, **kwargs):
        self.propnames = self.props.keys()
        self.title = title
        self.attributes = [
            'Condition', 'CreationPolicy', 'DeletionPolicy', 'DependsOn',
            'Metadata', 'UpdatePolicy', 'UpdateReplacePolicy',
        ]
        # Properties set by the user are collected here; marking the object
        # as initialized makes __setattr__ route prop names into properties.
        self.properties = {}
        self.resource = {}
        self.__initialized = True
        for k, (_, required) in self.props.items():
            v = getattr(type(self), k, None)
            if v is not None and k not in kwargs:
                self.__setattr__(k, v)
        # Now that it is initialized, populate it with the kwargs
        for k, v in kwargs.items():
            self.__setattr__(k, v)
def __getattr__(self, name):
# If pickle loads this object, then __getattr__ will cause
# an infinite loop when pickle invokes this object to look for
# __setstate__ before attributes is "loaded" into this object.
# Therefore, short circuit the rest of this call if attributes
# is not loaded yet.
        if "attributes" not in self.__dict__:
            raise AttributeError(name)
try:
if name in self.attributes:
return self.resource[name]
else:
return self.properties.__getitem__(name)
except KeyError:
# Fall back to the name attribute in the object rather than
# in the properties dict. This is for non-OpenStack backwards
# compatibility since OpenStack objects use a "name" property.
if name == 'name':
return self.__getattribute__('title')
            raise AttributeError(name)
def __setattr__(self, name, value):
if name in self.__dict__.keys() \
or '_BaseAWSObject__initialized' not in self.__dict__:
return dict.__setattr__(self, name, value)
elif name in self.propnames:
# Check the type of the object and compare against what we were
# expecting.
expected_type = self.props[name][0]
# If it's a function, call it...
if isinstance(expected_type, types.FunctionType):
try:
value = expected_type(value)
except Exception:
sys.stderr.write(
"%s: %s.%s function validator '%s' threw "
"exception:\n" % (self.__class__,
self.title,
name,
expected_type.__name__))
raise
return self.properties.__setitem__(name, value)
# If it's a list of types, check against those types...
elif isinstance(expected_type, list):
# If we're expecting a list, then make sure it is a list
if not isinstance(value, list):
self._raise_type(name, value, expected_type)
                # Iterate over the list and make sure it matches our
                # type checks (as above, except AWSHelperFn because
                # we can't do the validation ourselves)
for v in value:
if not isinstance(v, tuple(expected_type)):
self._raise_type(name, v, expected_type)
# Validated so assign it
return self.properties.__setitem__(name, value)
# Final validity check, compare the type of value against
# expected_type which should now be either a single type or
# a tuple of types.
elif isinstance(value, expected_type):
return self.properties.__setitem__(name, value)
else:
self._raise_type(name, value, expected_type)
def _raise_type(self, name, value, expected_type):
raise TypeError('%s: %s.%s is %s, expected %s' % (self.__class__,
self.title,
name,
type(value),
expected_type))
    def validate_title(self):
        if not valid_names.match(self.title):
            raise ValueError('Name "%s" not alphanumeric' % self.title)
def validate(self):
pass
def no_validation(self):
self.do_validation = False
return self
@classmethod
def from_dict(cls, title, d):
return cls._from_dict(title, **d)
def _validate_props(self):
for k, (_, required) in self.props.items():
if required and k not in self.properties:
rtype = getattr(self, 'resource_type', "<unknown type>")
title = getattr(self, 'title')
msg = "Resource %s required in type %s" % (k, rtype)
if title:
msg += " (title: %s)" % title
raise ValueError(msg)
class AWSProperty(BaseAWSObject):
"""
Used for CloudFormation Resource Property objects
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/
aws-product-property-reference.html
"""
dictname = None
def __init__(self, title=None, **kwargs):
super(AWSProperty, self).__init__(title, **kwargs)
def encode_to_dict(obj):
if hasattr(obj, 'to_dict'):
        # Calling encode_to_dict to ensure object is
        # normalized to a base dictionary all the way down.
return encode_to_dict(obj.to_dict())
elif isinstance(obj, (list, tuple)):
new_lst = []
for o in list(obj):
new_lst.append(encode_to_dict(o))
return new_lst
elif isinstance(obj, dict):
props = {}
for name, prop in obj.items():
props[name] = encode_to_dict(prop)
return props
# This is useful when dealing with external libs using
# this format. Specifically awacs.
elif hasattr(obj, 'JSONrepr'):
return encode_to_dict(obj.JSONrepr())
return obj
```
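To show how the props-driven validation above is meant to be exercised, here is a hypothetical subclass modeled on troposphere's conventions; `Bucket` and its props are illustrative, not part of this repository.
```python
# Hypothetical troposphere-style subclass; not part of this repo.
class Bucket(BaseAWSObject):
    props = {
        'BucketName': (str, True),   # (expected type, required)
        'Tags': ([dict], False),
    }

b = Bucket(title='MyBucket', BucketName='demo-bucket')
b.validate_title()       # passes: title is alphanumeric
b._validate_props()      # passes: required BucketName is set
print(encode_to_dict(b.properties))  # {'BucketName': 'demo-bucket'}
```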
#### File: canary/lib/validators.py
```python
from typing import get_type_hints
from functools import wraps
from inspect import getfullargspec
def validate_input(obj, **kwargs):
hints = get_type_hints(obj)
# iterate all type hints
for attr_name, attr_type in hints.items():
if attr_name == 'return':
continue
if not isinstance(kwargs[attr_name], attr_type):
raise TypeError(
'Argument %r is not of type %s' % (attr_name, attr_type)
)
def type_check(decorator):
@wraps(decorator)
def wrapped_decorator(*args, **kwargs):
# translate *args into **kwargs
func_args = getfullargspec(decorator)[0]
kwargs.update(dict(zip(func_args, args)))
validate_input(decorator, **kwargs)
return decorator(**kwargs)
return wrapped_decorator
```
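A quick demonstration of the decorator above; `scale` is a hypothetical function for illustration.
```python
# Example use of @type_check with standard type hints.
@type_check
def scale(name: str, desired: int) -> str:
    return "{} -> {}".format(name, desired)

scale("web-asg", 4)       # OK
scale("web-asg", "four")  # raises TypeError: Argument 'desired' is not of type <class 'int'>
```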
{
"source": "jonahjones777/eksboot",
"score": 2
}
#### File: src/resources/routes.py
```python
from flask import Flask, request
from flask_restplus import Api, Resource, Namespace, reqparse
from server.instance import server
from models.models import create_parser, create_addon_parser
import os
import typing
from environment.logger import logger
import uuid
app, api = server.app, server.api
response_dict = {
200: 'OK',
    400: 'Invalid Argument',
500: 'Mapping Key Error'
}
###############
#### Health ###
###############
health_ns = Namespace(
name='Health',
description='My health related routes',
path='/health'
)
api.add_namespace(health_ns)
@health_ns.route("/")
class Health(Resource):
@health_ns.doc(responses=response_dict)
def get(self):
return {'response': 'ok'}
###############
#### Create ###
###############
create_ns = Namespace(
name='Create',
description='My create related routes',
path='/create'
)
api.add_namespace(create_ns)
@create_ns.route('/')
class Create(Resource):
@create_ns.expect(create_parser)
@create_ns.doc(responses=response_dict)
def post(self):
args = create_parser.parse_args()
create_addon_args = create_addon_parser.parse_args(req=create_parser)
logger.info('/create/ POST')
id = uuid.uuid4()
try:
return str(id)
except Exception as e:
logger.info(e)
            return {"error": "true"}
# except KeyError as e:
# print(e)
# api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
# except Exception as e:
# print(e)
# api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
###############
#### Status ###
###############
status_ns = Namespace(
name='Status',
    description='My status related routes',
path='/status'
)
api.add_namespace(status_ns)
@status_ns.route('/*')
class StatusAll(Resource):
@status_ns.doc(responses=response_dict)
def get(self):
logger.info('/status/* GET')
logger.info(request.json)
try:
            return {
"Path":"Status/*"
}
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
@status_ns.route('/<string:name>')
class Status(Resource):
@status_ns.doc(responses=response_dict)
    def get(self, name):
logger.info('/status/<name> GET')
logger.info(request.json)
try:
            return {
"Path":"Status/<name>"
}
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
###############
#### Update ###
###############
update_ns = Namespace(
name='Update',
    description='My update related routes',
path='/update'
)
api.add_namespace(update_ns)
@update_ns.route('/<string:name>')
class Update(Resource):
    @update_ns.doc(responses=response_dict)
    def get(self, name):
        logger.info('/update/<name> GET')
logger.info(request.json)
try:
            return {
"Path":"Update/<name>"
}
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
###############
#### Delete ###
###############
delete_ns = Namespace(
name='Delete',
    description='My delete related routes',
path='/delete'
)
api.add_namespace(delete_ns)
@delete_ns.route('/<string:name>')
class Delete(Resource):
@delete_ns.doc(responses=response_dict)
    def post(self, name):
        logger.info('/delete/<name> POST')
logger.info(request.json)
try:
            return {
"Path":"Delete/<name>"
}
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
```
#### File: src/environment/template.py
```python
import subprocess
import json
import jinja2
import os
import zipfile
import uuid
def write_jinja_file(logger, d, i_filename, o_filename, path):
try:
cwd = os.getcwd()
os.chdir(path)
logger.info(f"jinja templating {i_filename}")
j2loader = jinja2.FileSystemLoader(path)
j2env = jinja2.Environment(loader=j2loader)
j2template = j2env.get_template(i_filename)
output = j2template.render(d)
with open(o_filename, "w") as fh:
fh.write(output)
fh.close()
os.chdir(cwd)
return
except Exception as e:
logger.exception(e)
os.chdir(cwd)
raise
def zip_function_upload(logger, zip_file_name, path):
try:
cwd = os.getcwd()
os.chdir(path)
if os.path.exists(zip_file_name):
try:
os.remove(zip_file_name)
except OSError:
pass
zip_file = zipfile.ZipFile(zip_file_name, mode='a')
for folder, subs, files in os.walk('.'):
for filename in files:
file_path = os.path.join(folder, filename)
if not zip_file_name in file_path:
zip_file.write(file_path)
zip_file.close()
os.chdir(cwd)
return zip_file_name
except Exception as e:
logger.exception(e)
os.chdir(cwd)
raise
class SetEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
return json.JSONEncoder.default(self, obj)
def streaming_output(cmd, dirc, logger):
try:
p = subprocess.Popen(cmd,
cwd=dirc,
stdout=subprocess.PIPE)
for line in iter(p.stdout.readline, b''):
logger.info('>>> {}'.format(line.rstrip()))
return
except Exception as e:
logger.info(e)
raise
def create_cdk_json(d, cdk_dir, logger):
try:
cdk_file = '{}/cdk.json'.format(cdk_dir)
with open(cdk_file, 'w') as outfile:
json.dump({
'app':'python3 main.py',
'context':d
}, outfile, indent=2, separators=(',', ': '), cls=SetEncoder)
logger.info("Creating cdk context file:")
logger.info(d)
return
except Exception as e:
logger.info(e)
raise
```
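A sketch of how these helpers chain together during a create call (the endpoint below drives the same sequence); the context values here are placeholders.
```python
# Hypothetical driver for the helpers above; values are placeholders.
import logging
logger = logging.getLogger("eksboot")

write_jinja_file(logger,
                 d={"name": "demo", "region": "us-west-2"},
                 i_filename="cluster.yaml.j2",
                 o_filename="cluster.yaml",
                 path="./codebuild")
create_cdk_json({"name": "demo", "s3_bucket": "my-bucket"}, "./cdk", logger)
streaming_output(["cdk", "deploy", "--require-approval", "never"], "./cdk", logger)
```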
#### File: src/resources/create.py
```python
from flask import Flask
from flask_restplus import Api, Resource, Namespace, reqparse
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from server.instance import server
from models.models import create_parser, create_addon_parser
from environment.logger_aws import Logger
from environment.template import write_jinja_file, zip_function_upload, streaming_output, create_cdk_json
from lib.s3 import S3
import os
import uuid
app, region, api = server.app, server.region, server.api
aws_logger = Logger(loglevel='info')
response_dict = {
200: 'OK',
    400: 'Invalid Argument',
500: 'Mapping Key Error'
}
limiter = Limiter(
app,
key_func=get_remote_address,
default_limits=["2 per second, 10 per minute"]
)
#IAM
# 1. Provide role to use
# 2. Add role user to assume from CLI
# 3. Give assume role, and eks update conmmand
###############
#### Create ###
###############
create_ns = Namespace(
name='Create',
description='My create related routes',
path='/create'
)
api.add_namespace(create_ns)
@create_ns.route('/')
class Create(Resource):
@create_ns.expect(create_parser)
@create_ns.doc(responses=response_dict)
@limiter.limit("10/minute;2/second")
def post(self):
        '''
        Create an S3 bucket for the stack  # Maybe we can have people pass this in as an input?
        Jinja-template our cluster creation file using the POST input
        Update the CDK JSON context with the name/CRUD call and invoke the CDK
        Create some sort of data structure to pass back for auth into the cluster
        '''
aws_logger.info('/create/ POST')
args = create_parser.parse_args()
create_addon_args = create_addon_parser.parse_args(req=create_parser)
chdir = os.getcwd()
aws_logger.info(args)
args['region'] = region
s3 = S3(aws_logger, region=region)
write_jinja_file(aws_logger,
d=args,
i_filename='cluster.yaml.j2',
o_filename='cluster.yaml',
path=f"{chdir}/codebuild/"
)
write_jinja_file(aws_logger,
d=args,
i_filename='buildspec_create.yml.j2',
o_filename='buildspec.yml',
path=f"{chdir}/codebuild/"
)
zipped = zip_function_upload(aws_logger,
zip_file_name='buildspec.yml.zip',
path=f"{chdir}/codebuild/"
)
aws_logger.info(f"Create zipfile {zipped}.... Uploading to bucket: {args['s3bucket']}")
s3.upload_file(bucket=args['s3bucket'], file_name=f"{chdir}/codebuild/buildspec.yml.zip", file_obj=zipped)
create_cdk_json(
{
'name':args['name'],
's3_bucket':args['s3bucket'],
'zipfile':'buildspec.yml.zip',
'iamrole':args['iamrole']
},
f"{chdir}/cdk",
aws_logger
)
aws_logger.info('created the cdk.json file for the CDK params')
s3.upload_dict(f"{args['name']}.json", args, args['s3bucket'])
streaming_output(["cdk", "deploy", "--require-approval", "never"], f"{chdir}/cdk/", aws_logger)
try:
return args
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
```
#### File: src/resources/update.py
```python
from flask import Flask
from flask_restplus import Api, Resource, Namespace, reqparse
from server.instance import server
from models.models import update_parser, create_addon_parser
from environment.logger_flask import logger
from environment.logger_aws import Logger
from environment.template import write_jinja_file, zip_function_upload, streaming_output, create_cdk_json
from lib.s3 import S3
import os
app, region, api = server.app, server.region, server.api
aws_logger = Logger(loglevel='info')
response_dict = {
200: 'OK',
    400: 'Invalid Argument',
500: 'Mapping Key Error'
}
###############
#### Update ###
###############
update_ns = Namespace(
name='Update',
    description='My update related routes',
path='/update'
)
api.add_namespace(update_ns)
@update_ns.route('/<string:name>')
class Status(Resource):
@update_ns.expect(update_parser)
@update_ns.doc(responses=response_dict)
def post(self, name):
        '''
        Create an S3 bucket for the stack  # Maybe we can have people pass this in as an input?
        Jinja-template our cluster creation file using the POST input
        Update the CDK JSON context with the name/CRUD call and invoke the CDK
        Create some sort of data structure to pass back for auth into the cluster
        '''
aws_logger.info('/update/ POST')
        args = update_parser.parse_args()
        create_addon_args = create_addon_parser.parse_args(req=update_parser)
chdir = os.getcwd()
aws_logger.info(args)
# template_dict = args['addons']
# template_dict.update({'name':args['name']})
#
s3 = S3(aws_logger)
write_jinja_file(aws_logger,
d=args,
i_filename='cluster.yaml.j2',
o_filename='cluster.yaml',
path=f"{chdir}/codebuild/"
)
write_jinja_file(aws_logger,
d=args,
i_filename='buildspec_create.yml.j2',
o_filename='buildspec.yml',
path=f"{chdir}/codebuild/"
)
zipped = zip_function_upload(aws_logger,
zip_file_name='buildspec.yml.zip',
path=f"{chdir}/codebuild/"
)
aws_logger.info(f"Create zipfile {zipped}.... Uploading to bucket: {args['s3bucket']}")
s3.upload_file(bucket=args['s3bucket'], file_name=f"{chdir}/codebuild/buildspec.yml.zip", file_obj=zipped)
create_cdk_json(
{
'name':args['name'],
's3_bucket':args['s3bucket'],
'zipfile':'buildspec.yml.zip',
'iamrole':args['iamrole']
},
f"{chdir}/cdk/",
aws_logger
)
aws_logger.info('created the cdk.json file for the CDK params')
streaming_output(["cdk", "deploy", "--require-approval", "never"], f"{chdir}/cdk/", aws_logger)
passback = {'name':args['name']}
try:
return args
except KeyError as e:
print(e)
api.abort(500, e.__doc__, status = "Could not save information", statusCode = "500")
except Exception as e:
print(e)
api.abort(400, e.__doc__, status = "Could not save information", statusCode = "400")
```
#### File: src/server/instance.py
```python
import os
from flask import Flask, Blueprint
from flask_restplus import Api, Resource, fields
from environment.environment import environment_config
class Server(object):
def __init__(self):
self.app = Flask(__name__)
self.region = os.getenv('AWS_DEFAULT_REGION', default='us-west-2')
self.api = Api(self.app,
version='1.0',
title='EKS Boot',
description='A simple app to manage EKS',
                       doc=environment_config["swagger-url"]
)
def run(self):
self.app.run(
host="0.0.0.0",
            debug=environment_config["debug"],
            port=environment_config["port"]
)
server = Server()
```
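A hypothetical entrypoint tying this shared server instance to the route modules above; the module names are assumptions. Importing the routes module registers its namespaces on `server.api` as a side effect.
```python
# Hypothetical entrypoint (module paths assumed).
from server.instance import server
import resources.routes  # noqa: F401  (registers namespaces on import)

if __name__ == "__main__":
    server.run()
```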
{
"source": "jonahkh/pelorus",
"score": 2
}
#### File: pelorus/exporters/pelorus.py
```python
import logging
import os
import sys
from datetime import datetime, timezone
from kubernetes import config
DEFAULT_APP_LABEL = 'app.kubernetes.io/name'
DEFAULT_PROD_LABEL = ''
DEFAULT_LOG_LEVEL = 'INFO'
loglevel = os.getenv('LOG_LEVEL', DEFAULT_LOG_LEVEL)
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
print("Initializing Logger wit LogLevel: %s" % loglevel.upper())
def load_kube_config():
if "OPENSHIFT_BUILD_NAME" in os.environ:
config.load_incluster_config()
        with open(
            "/run/secrets/kubernetes.io/serviceaccount/namespace", "r"
        ) as file_namespace:
            namespace = file_namespace.read()
        print("namespace: %s\n" % namespace)
else:
config.load_kube_config()
def convert_date_time_to_timestamp(date_time):
    # Parse an ISO-8601 UTC timestamp string, e.g. "2020-06-01T12:00:00Z"
    timestamp = datetime.strptime(str(date_time), '%Y-%m-%dT%H:%M:%SZ')
unixformattime = timestamp.replace(tzinfo=timezone.utc).timestamp()
return unixformattime
def get_app_label():
return os.getenv('APP_LABEL', DEFAULT_APP_LABEL)
def get_prod_label():
return os.getenv('PROD_LABEL', DEFAULT_PROD_LABEL)
def check_required_config(vars):
missing_configs = False
for var in vars:
if var not in os.environ:
logging.error("Missing required environment variable '%s'." % var)
missing_configs = True
if missing_configs:
logging.error("This program will exit.")
sys.exit(1)
```
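As a worked example of the timestamp conversion above (the input value is arbitrary):
```python
# Worked example for convert_date_time_to_timestamp.
ts = convert_date_time_to_timestamp("2020-06-01T12:00:00Z")
print(ts)  # 1591012800.0 (epoch seconds, UTC)
```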
{
"source": "jonah-kohn/cloud",
"score": 2
}
#### File: tests/integration/run_on_script_test.py
```python
import os
import sys
from typing import Text
from unittest import mock
import tensorflow as tf
import tensorflow_cloud as tfc
# Following are the env variables available in test infrastructure:
#
# The staging bucket to use for cloudbuild as well as save the model and data.
# TEST_BUCKET = os.environ['TEST_BUCKET']
#
# The project id to use to run tests.
# PROJECT_ID = os.environ['PROJECT_ID']
#
# The GCP region in which the end-to-end test is run.
# REGION = os.environ['REGION']
#
# Unique ID for this build, can be used as a label for an AI Platform training job.
# BUILD_ID = os.environ['BUILD_ID']
class RunOnScriptTest(tf.test.TestCase):
def setUp(self):
super(RunOnScriptTest, self).setUp()
# To keep track of content that needs to be deleted in teardown clean up
self.test_folders = []
self.test_data_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "../testdata/"
)
def tearDown(self):
super(RunOnScriptTest, self).tearDown()
# Clean up any temporary file or folder created during testing.
for folder in self.test_folders:
self.delete_dir(folder)
def delete_dir(self, path: Text) -> None:
"""Deletes a directory if exists."""
if tf.io.gfile.isdir(path):
tf.io.gfile.rmtree(path)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_mirrored_strategy(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
chief_config=tfc.COMMON_MACHINE_CONFIGS['T4_2X'],
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_one_device_strategy(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_one_device_strategy_bucket_build(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
docker_image_bucket_name="TEST_BUCKET",
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_multi_worker_strategy(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
worker_count=1,
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_none_dist_strat_multi_worker_strategy(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_ctl.py"),
distribution_strategy=None,
worker_count=2,
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_none_dist_strat_multi_worker_strategy_bucket_build(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_ctl.py"),
worker_count=2,
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
docker_image_bucket_name="TEST_BUCKET",
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_tpu(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
chief_config=tfc.COMMON_MACHINE_CONFIGS["CPU"],
worker_count=1,
worker_config=tfc.COMMON_MACHINE_CONFIGS["TPU"],
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_one_device_stream_logs(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
stream_logs=True,
)
mock_exit.assert_called_once_with(0)
@mock.patch.object(sys, "exit", autospec=True)
def test_auto_one_device_job_labels(self, mock_exit):
tfc.run(
entry_point=os.path.join(self.test_data_path, "mnist_example_using_fit.py"),
requirements_txt=os.path.join(self.test_data_path, "requirements.txt"),
job_labels={"job": "on_script_tests", "team": "keras"},
)
mock_exit.assert_called_once_with(0)
if __name__ == "__main__":
tf.test.main()
```
{
"source": "JonahKr/core",
"score": 2
}
#### File: components/netatmo/media_source.py
```python
import datetime as dt
import re
from typing import Optional, Tuple
from homeassistant.components.media_player.const import MEDIA_TYPE_VIDEO
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.components.media_source.const import MEDIA_MIME_TYPES
from homeassistant.components.media_source.error import Unresolvable
from homeassistant.components.media_source.models import (
BrowseMediaSource,
MediaSource,
MediaSourceItem,
PlayMedia,
)
from homeassistant.core import HomeAssistant, callback
from .const import DATA_CAMERAS, DATA_EVENTS, DOMAIN, MANUFACTURER
MIME_TYPE = "application/x-mpegURL"
async def async_get_media_source(hass: HomeAssistant):
"""Set up Netatmo media source."""
return NetatmoSource(hass)
class NetatmoSource(MediaSource):
"""Provide Netatmo camera recordings as media sources."""
name: str = MANUFACTURER
def __init__(self, hass: HomeAssistant):
"""Initialize Netatmo source."""
super().__init__(DOMAIN)
self.hass = hass
self.events = self.hass.data[DOMAIN][DATA_EVENTS]
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve media to a url."""
_, camera_id, event_id = async_parse_identifier(item)
url = self.events[camera_id][event_id]["media_url"]
return PlayMedia(url, MIME_TYPE)
async def async_browse_media(
self, item: MediaSourceItem, media_types: Tuple[str] = MEDIA_MIME_TYPES
) -> Optional[BrowseMediaSource]:
"""Return media."""
try:
source, camera_id, event_id = async_parse_identifier(item)
except Unresolvable as err:
raise BrowseError(str(err)) from err
return self._browse_media(source, camera_id, event_id)
def _browse_media(
self, source: str, camera_id: str, event_id: int
) -> Optional[BrowseMediaSource]:
"""Browse media."""
if camera_id and camera_id not in self.events:
raise BrowseError("Camera does not exist.")
if event_id and event_id not in self.events[camera_id]:
raise BrowseError("Event does not exist.")
return self._build_item_response(source, camera_id, event_id)
    def _build_item_response(
        self, source: str, camera_id: str, event_id: Optional[int] = None
    ) -> Optional[BrowseMediaSource]:
if event_id and event_id in self.events[camera_id]:
created = dt.datetime.fromtimestamp(event_id)
thumbnail = self.events[camera_id][event_id].get("snapshot", {}).get("url")
message = remove_html_tags(self.events[camera_id][event_id]["message"])
title = f"{created} - {message}"
else:
title = self.hass.data[DOMAIN][DATA_CAMERAS].get(camera_id, MANUFACTURER)
thumbnail = None
if event_id:
path = f"{source}/{camera_id}/{event_id}"
else:
path = f"{source}/{camera_id}"
media = BrowseMediaSource(
domain=DOMAIN,
identifier=path,
media_content_type=MEDIA_TYPE_VIDEO,
title=title,
can_play=bool(
event_id and self.events[camera_id][event_id].get("media_url")
),
can_expand=event_id is None,
thumbnail=thumbnail,
)
if not media.can_play and not media.can_expand:
return None
if not media.can_expand:
return media
media.children = []
# Append first level children
if not camera_id:
for cid in self.events:
child = self._build_item_response(source, cid)
if child:
media.children.append(child)
else:
for eid in self.events[camera_id]:
child = self._build_item_response(source, camera_id, eid)
if child:
media.children.append(child)
return media
def remove_html_tags(text):
"""Remove html tags from string."""
clean = re.compile("<.*?>")
return re.sub(clean, "", text)
@callback
def async_parse_identifier(
item: MediaSourceItem,
) -> Tuple[str, str, Optional[int]]:
"""Parse identifier."""
if not item.identifier:
return "events", "", None
source, path = item.identifier.lstrip("/").split("/", 1)
if source != "events":
raise Unresolvable("Unknown source directory.")
if "/" in path:
camera_id, event_id = path.split("/", 1)
return source, camera_id, int(event_id)
return source, path, None
```
#### File: components/shelly/light.py
```python
from aioshelly import Block
from homeassistant.components.light import SUPPORT_BRIGHTNESS, LightEntity
from homeassistant.core import callback
from . import ShellyDeviceWrapper
from .const import DOMAIN
from .entity import ShellyBlockEntity
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up lights for device."""
wrapper = hass.data[DOMAIN][config_entry.entry_id]
blocks = [block for block in wrapper.device.blocks if block.type == "light"]
if not blocks:
return
async_add_entities(ShellyLight(wrapper, block) for block in blocks)
class ShellyLight(ShellyBlockEntity, LightEntity):
"""Switch that controls a relay block on Shelly devices."""
def __init__(self, wrapper: ShellyDeviceWrapper, block: Block) -> None:
"""Initialize light."""
super().__init__(wrapper, block)
self.control_result = None
self._supported_features = 0
if hasattr(block, "brightness"):
self._supported_features |= SUPPORT_BRIGHTNESS
@property
def is_on(self) -> bool:
"""If light is on."""
if self.control_result:
return self.control_result["ison"]
return self.block.output
@property
def brightness(self):
"""Brightness of light."""
if self.control_result:
brightness = self.control_result["brightness"]
else:
brightness = self.block.brightness
return int(brightness / 100 * 255)
@property
def supported_features(self):
"""Supported features."""
return self._supported_features
async def async_turn_on(
self, brightness=None, **kwargs
): # pylint: disable=arguments-differ
"""Turn on light."""
params = {"turn": "on"}
if brightness is not None:
params["brightness"] = int(brightness / 255 * 100)
self.control_result = await self.block.set_state(**params)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs):
"""Turn off light."""
self.control_result = await self.block.set_state(turn="off")
self.async_write_ha_state()
@callback
def _update_callback(self):
"""When device updates, clear control result that overrides state."""
self.control_result = None
super()._update_callback()
```
{
"source": "JonahKr/Hermes_Hass_App",
"score": 2
}
#### File: Hermes_Hass_App/app/hass_synonym_db.py
```python
from tinydb import TinyDB, Query
from hass_websocket_client import hass_websocket_client
from typing import Optional, Tuple
import json
db_path = "../synonym_db.json"
"""
Filestructure:
$ALL:
$ALL:
User1:
User2:
Zone1:
$ALL:
User2:
"""
class hass_synonym_db:
def __init__(self, ws: hass_websocket_client):
self.ws = ws
self.db = TinyDB(db_path)
self.zone_query = Query()
    def new_zone(self, zone_name: str) -> Tuple[bool, Optional[str]]:
req: list = self.db.search(self.zone_query.name == zone_name)
if len(req) > 0:
return (False, "This Zone already exists")
else:
self.db.insert({'name':zone_name, 'user':{'$ALL':{}}})
            return (True, None)
def delete_zone(self, zone_name: str):
self.db.remove(self.zone_query.name == zone_name)
def update_zone(self, zone_name: str, new_zone_name: str):
self.db.update({'name': new_zone_name}, self.zone_query.name == zone_name)
    def is_zone(self, zone_name: str) -> bool:
        return len(self.db.search(self.zone_query.name == zone_name)) > 0
    def new_synonym(self, entity_id: str, synonym: str, zone: str = '$ALL', user: str = '$ALL'):
        # Minimal completion of the original empty insert: store the synonym under the zone's user mapping
        self.db.update(lambda doc: doc['user'].setdefault(user, {}).update({synonym: entity_id}),
                       self.zone_query.name == zone)
# Instantiation requires a connected hass_websocket_client, e.g.:
# test1 = hass_synonym_db(ws)
```
{
"source": "JonahKr/rhasspy-hermes-app",
"score": 3
}
#### File: rhasspy-hermes-app/tests/test_arguments.py
```python
import argparse
import pytest
import sys
from rhasspyhermes_app import HermesApp
def test_default_arguments(mocker):
"""Test whether default arguments are set up correctly in a HermesApp object."""
app = HermesApp("Test default arguments", mqtt_client=mocker.MagicMock())
assert app.args.host == "localhost"
assert app.args.port == 1883
assert app.args.tls == False
assert app.args.username is None
assert app.args.password is None
def test_arguments_from_cli(mocker):
"""Test whether arguments from the command line are set up correctly in a HermesApp object."""
mocker.patch(
"sys.argv",
[
"rhasspy-hermes-app-test",
"--host",
"rhasspy.home",
"--port",
"8883",
"--tls",
"--username",
"rhasspy-hermes-app",
"--password",
"<PASSWORD>",
],
)
app = HermesApp("Test arguments in init", mqtt_client=mocker.MagicMock())
assert app.args.host == "rhasspy.home"
assert app.args.port == 8883
assert app.args.tls == True
assert app.args.username == "rhasspy-hermes-app"
assert app.args.password == "<PASSWORD>"
def test_arguments_in_init(mocker):
"""Test whether arguments are set up correctly while initializing a HermesApp object."""
app = HermesApp(
"Test arguments in init",
mqtt_client=mocker.MagicMock(),
host="rhasspy.home",
port=8883,
tls=True,
username="rhasspy-hermes-app",
password="<PASSWORD>",
)
assert app.args.host == "rhasspy.home"
assert app.args.port == 8883
assert app.args.tls == True
assert app.args.username == "rhasspy-hermes-app"
assert app.args.password == "<PASSWORD>"
def test_if_cli_arguments_overwrite_init_arguments(mocker):
"""Test whether arguments from the command line overwrite arguments to a HermesApp object."""
mocker.patch(
"sys.argv",
[
"rhasspy-hermes-app-test",
"--host",
"rhasspy.home",
"--port",
"1883",
"--username",
"rhasspy-hermes-app",
"--password",
"<PASSWORD>",
],
)
app = HermesApp(
"Test arguments in init",
mqtt_client=mocker.MagicMock(),
host="rhasspy.local",
port=8883,
username="rhasspy-hermes-app-test",
password="<PASSWORD>",
)
assert app.args.host == "rhasspy.home"
assert app.args.port == 1883
assert app.args.username == "rhasspy-hermes-app"
assert app.args.password == "<PASSWORD>"
def test_if_cli_arguments_overwrite_init_arguments_with_argument_parser(mocker):
"""Test whether arguments from the command line overwrite arguments to a HermesApp object
if the user supplies their own ArgumentParser object."""
mocker.patch(
"sys.argv",
[
"rhasspy-hermes-app-test",
"--host",
"rhasspy.home",
"--port",
"1883",
"--username",
"rhasspy-hermes-app",
"--password",
"<PASSWORD>",
"--test-argument",
"foobar",
"--test-flag",
],
)
parser = argparse.ArgumentParser(prog="rhasspy-hermes-app-test")
parser.add_argument("--test-argument", default="foo")
parser.add_argument("--test-flag", action="store_true")
app = HermesApp(
"Test arguments in init",
parser=parser,
mqtt_client=mocker.MagicMock(),
host="rhasspy.local",
port=8883,
username="rhasspy-hermes-app-test",
password="<PASSWORD>",
test_argument="bar",
)
assert app.args.host == "rhasspy.home"
assert app.args.port == 1883
assert app.args.username == "rhasspy-hermes-app"
assert app.args.password == "<PASSWORD>"
assert app.args.test_argument == "foobar"
assert app.args.test_flag == True
```
{
"source": "jonahliu0426/smart-photot-album",
"score": 2
}
#### File: smart-photot-album/lambda/index-photos.py
```python
import json
import boto3
import datetime
import base64
import urllib3
MASTER_USER = 'adminuser'
MASTER_PASSWORD = '<PASSWORD>'
DOMAIN_ENDPOINT = 'https://search-photos-cqrdioq663y64lxnqrgjhstgfm.us-east-1.es.amazonaws.com'
INDEX = 'photo'
TYPE = 'Photo'
BASE_URL = '%s/%s/%s' % (DOMAIN_ENDPOINT, INDEX, TYPE)
http = urllib3.PoolManager()
authorization = base64.b64encode(('%s:%s' % (MASTER_USER, MASTER_PASSWORD)).encode('ascii')).decode('ascii')
headers = {
'Authorization': 'Basic ' + authorization,
'Content-Type': 'application/json'
}
def add(data, key):
"""
Add image key and labels data to OpenSearch
"""
response = http.request("POST", url='%s/%s' % (BASE_URL, key), headers=headers, body=data)
return
def lambda_handler(event, context):
# get bucket and keys from S3 object
bucket = event['Records'][0]['s3']['bucket']['name']
key = event['Records'][0]['s3']['object']['key']
detected_labels_list = detect_labels(key, bucket)
labels_name_list = [label['Name'].strip().lower() for label in detected_labels_list]
s3 = boto3.client('s3')
data = s3.head_object(Bucket=bucket, Key=key)
# combine detected labels with user-defined custom labels
labels = json.loads(data['Metadata']['label']) + labels_name_list
timestamp = data["LastModified"]
# construct json format data
response = {
"objectKey": key,
"bucket": bucket,
"createdTimestamp": timestamp,
"labels": labels,
}
res = json.dumps(response, default=str)
# add data to OpenSearch
add(res, key)
return {
'statusCode': 200,
'body': res
}
# AWS Rekognition detect labels
def detect_labels(photo, bucket):
client=boto3.client('rekognition')
response = client.detect_labels(Image={'S3Object':{'Bucket':bucket,'Name':photo}}, MaxLabels=10)
return response['Labels']
```
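For the query side, a minimal sketch of searching the same index with the same Basic-auth headers; this function is an assumption for illustration, not part of the lambda above.
```python
# Hypothetical search against the same index, reusing `http` and `headers`
# from above; not part of the lambda handler itself.
def search_by_label(label):
    query = json.dumps({"query": {"match": {"labels": label}}})
    return http.request("GET",
                        url='%s/%s/_search' % (DOMAIN_ENDPOINT, INDEX),
                        headers=headers, body=query)
```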
{
"source": "jonahmackey/big-sleep",
"score": 2
}
#### File: big-sleep/big_sleep/big_sleep_og.py
```python
import os
import sys
import subprocess
import signal
import string
import re
from datetime import datetime
from pathlib import Path
import random
import torch
import torch.nn.functional as F
from torch import nn
from torch.optim import Adam
from torchvision.utils import save_image
import torchvision.transforms as T
from PIL import Image
from tqdm import tqdm, trange
from big_sleep.ema import EMA
from big_sleep.resample import resample
from big_sleep.biggan import BigGAN
from big_sleep.clip import load, tokenize
assert torch.cuda.is_available(), 'CUDA must be available in order to use Big Sleep'
# graceful keyboard interrupt
terminate = False
def signal_handling(signum,frame):
global terminate
terminate = True
signal.signal(signal.SIGINT,signal_handling)
# helpers
def exists(val):
return val is not None
def open_folder(path):
if os.path.isfile(path):
path = os.path.dirname(path)
if not os.path.isdir(path):
return
cmd_list = None
if sys.platform == 'darwin':
cmd_list = ['open', '--', path]
elif sys.platform == 'linux2' or sys.platform == 'linux':
cmd_list = ['xdg-open', path]
elif sys.platform in ['win32', 'win64']:
cmd_list = ['explorer', path.replace('/','\\')]
if cmd_list == None:
return
try:
subprocess.check_call(cmd_list)
except subprocess.CalledProcessError:
pass
except OSError:
pass
def create_text_path(text=None, img=None, encoding=None):
input_name = ""
if text is not None:
input_name += text
if img is not None:
if isinstance(img, str):
img_name = "".join(img.split(".")[:-1]) # replace spaces by underscores, remove img extension
img_name = img_name.split("/")[-1] # only take img name, not path
else:
img_name = "PIL_img"
input_name += "_" + img_name
if encoding is not None:
input_name = "your_encoding"
return input_name.replace("-", "_").replace(",", "").replace(" ", "_").replace("|", "--").strip('-_')[:255]
# tensor helpers
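# differentiable_topk below implements a soft top-k: on each of k passes it
# softmaxes the (temperature-scaled) logits, scatters the single top value
# into a one-hot tensor, then masks that index with -inf so the next pass
# selects a different class. Summing the k one-hots yields an (n, dim) soft
# selection that gradients can flow through.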
def differentiable_topk(x, k, temperature=1.):
n, dim = x.shape
topk_tensors = []
for i in range(k):
is_last = i == (k - 1)
values, indices = (x / temperature).softmax(dim=-1).topk(1, dim=-1)
topks = torch.zeros_like(x).scatter_(-1, indices, values)
topk_tensors.append(topks)
if not is_last:
x = x.scatter(-1, indices, float('-inf'))
topks = torch.cat(topk_tensors, dim=-1)
return topks.reshape(n, k, dim).sum(dim = 1)
def create_clip_img_transform(image_width):
clip_mean = [0.48145466, 0.4578275, 0.40821073]
clip_std = [0.26862954, 0.26130258, 0.27577711]
transform = T.Compose([
#T.ToPILImage(),
T.Resize(image_width),
T.CenterCrop((image_width, image_width)),
T.ToTensor(),
T.Normalize(mean=clip_mean, std=clip_std)
])
return transform
def rand_cutout(image, size, center_bias=False, center_focus=2):
width = image.shape[-1]
min_offset = 0
max_offset = width - size
if center_bias:
# sample around image center
center = max_offset / 2
std = center / center_focus
offset_x = int(random.gauss(mu=center, sigma=std))
offset_y = int(random.gauss(mu=center, sigma=std))
# resample uniformly if over boundaries
offset_x = random.randint(min_offset, max_offset) if (offset_x > max_offset or offset_x < min_offset) else offset_x
offset_y = random.randint(min_offset, max_offset) if (offset_y > max_offset or offset_y < min_offset) else offset_y
else:
offset_x = random.randint(min_offset, max_offset)
offset_y = random.randint(min_offset, max_offset)
cutout = image[:, :, offset_x:offset_x + size, offset_y:offset_y + size]
return cutout
# load biggan
class Latents(torch.nn.Module):
def __init__(
self,
num_latents = 15,
num_classes = 1000,
z_dim = 128,
max_classes = None,
class_temperature = 2.
):
super().__init__()
self.normu = torch.nn.Parameter(torch.zeros(num_latents, z_dim).normal_(std = 1))
self.cls = torch.nn.Parameter(torch.zeros(num_latents, num_classes).normal_(mean = -3.9, std = .3))
self.register_buffer('thresh_lat', torch.tensor(1))
assert not exists(max_classes) or max_classes > 0 and max_classes <= num_classes, f'max_classes must be between 0 and {num_classes}'
self.max_classes = max_classes
self.class_temperature = class_temperature
def forward(self):
if exists(self.max_classes):
classes = differentiable_topk(self.cls, self.max_classes, temperature = self.class_temperature)
else:
classes = torch.sigmoid(self.cls)
return self.normu, classes
class Model(nn.Module):
def __init__(
self,
image_size,
max_classes = None,
class_temperature = 2.,
ema_decay = 0.99
):
super().__init__()
assert image_size in (128, 256, 512), 'image size must be one of 128, 256, or 512'
self.biggan = BigGAN.from_pretrained(f'biggan-deep-{image_size}')
self.max_classes = max_classes
self.class_temperature = class_temperature
self.ema_decay = ema_decay
self.init_latents()
def init_latents(self):
latents = Latents(
num_latents = len(self.biggan.config.layers) + 1,
num_classes = self.biggan.config.num_classes,
z_dim = self.biggan.config.z_dim,
max_classes = self.max_classes,
class_temperature = self.class_temperature
)
self.latents = EMA(latents, self.ema_decay)
def forward(self):
self.biggan.eval()
out = self.biggan(*self.latents(), 1)
return (out + 1) / 2
class BigSleep(nn.Module):
def __init__(
self,
num_cutouts = 128,
loss_coef = 100,
image_size = 512,
bilinear = False,
max_classes = None,
class_temperature = 2.,
experimental_resample = False,
ema_decay = 0.99,
center_bias = False,
img_enc_model = 'ViT-B/32'
):
super().__init__()
self.loss_coef = loss_coef
self.image_size = image_size
self.num_cutouts = num_cutouts
self.experimental_resample = experimental_resample
self.center_bias = center_bias
self.img_enc_model = img_enc_model
self.interpolation_settings = {'mode': 'bilinear', 'align_corners': False} if bilinear else {'mode': 'nearest'}
# load clip
perceptor, normalize_image = load(self.img_enc_model, jit = False)
self.perceptor = perceptor
self.normalize_image = normalize_image
self.model = Model(
image_size = image_size,
max_classes = max_classes,
class_temperature = class_temperature,
ema_decay = ema_decay
)
def reset(self):
self.model.init_latents()
def sim_txt_to_img(self, text_embed, img_embed, text_type="max"):
sign = -1
if text_type == "min":
sign = 1
return sign * self.loss_coef * torch.cosine_similarity(text_embed, img_embed, dim = -1).mean()
def forward(self, text_embeds, text_min_embeds=[], return_loss = True):
width, num_cutouts = self.image_size, self.num_cutouts
out = self.model()
if not return_loss:
return out
pieces = []
for ch in range(num_cutouts):
# sample cutout size
size = int(width * torch.zeros(1,).normal_(mean=.8, std=.3).clip(.5, .95))
# get cutout
apper = rand_cutout(out, size, center_bias=self.center_bias)
if (self.experimental_resample):
apper = resample(apper, (224, 224))
else:
apper = F.interpolate(apper, (224, 224), **self.interpolation_settings)
pieces.append(apper)
into = torch.cat(pieces)
into = self.normalize_image(into)
image_embed = self.perceptor.encode_image(into)
latents, soft_one_hot_classes = self.model.latents()
num_latents = latents.shape[0]
latent_thres = self.model.latents.model.thresh_lat
lat_loss = torch.abs(1 - torch.std(latents, dim=1)).mean() + \
torch.abs(torch.mean(latents, dim = 1)).mean() + \
4 * torch.max(torch.square(latents).mean(), latent_thres)
for array in latents:
mean = torch.mean(array)
diffs = array - mean
var = torch.mean(torch.pow(diffs, 2.0))
std = torch.pow(var, 0.5)
zscores = diffs / std
skews = torch.mean(torch.pow(zscores, 3.0))
kurtoses = torch.mean(torch.pow(zscores, 4.0)) - 3.0
lat_loss = lat_loss + torch.abs(kurtoses) / num_latents + torch.abs(skews) / num_latents
cls_loss = ((50 * torch.topk(soft_one_hot_classes, largest = False, dim = 1, k = 999)[0]) ** 2).mean()
results = []
for txt_embed in text_embeds:
results.append(self.sim_txt_to_img(txt_embed, image_embed))
for txt_min_embed in text_min_embeds:
results.append(0.5 * self.sim_txt_to_img(txt_min_embed, image_embed, "min"))
sim_loss = sum(results).mean()
return out, (lat_loss, cls_loss, sim_loss)
class Imagine(nn.Module):
def __init__(
self,
*,
text=None,
img=None,
encoding=None,
text_min = "",
lr = .07,
image_size = 512,
gradient_accumulate_every = 1,
save_every = 50,
epochs = 20,
iterations = 1050,
save_progress = False,
bilinear = False,
open_folder = True,
seed = None,
append_seed = False,
torch_deterministic = False,
max_classes = None,
class_temperature = 2.,
save_date_time = False,
save_best = False,
experimental_resample = False,
ema_decay = 0.99,
num_cutouts = 128,
center_bias = False,
save_dir = None,
img_enc_model = 'ViT-B/32'
):
super().__init__()
if torch_deterministic:
assert not bilinear, 'the deterministic (seeded) operation does not work with interpolation (PyTorch 1.7.1)'
torch.set_deterministic(True)
self.seed = seed
self.append_seed = append_seed
if exists(seed):
print(f'setting seed of {seed}')
if seed == 0:
print('you can override this with --seed argument in the command line, or --random for a randomly chosen one')
torch.manual_seed(seed)
self.epochs = epochs
self.iterations = iterations
model = BigSleep(
image_size = image_size,
bilinear = bilinear,
max_classes = max_classes,
class_temperature = class_temperature,
experimental_resample = experimental_resample,
ema_decay = ema_decay,
num_cutouts = num_cutouts,
center_bias = center_bias,
img_enc_model = img_enc_model
).cuda()
self.model = model
self.lr = lr
self.optimizer = Adam(model.model.latents.model.parameters(), lr)
self.gradient_accumulate_every = gradient_accumulate_every
self.save_every = save_every
self.save_dir = save_dir
self.save_progress = save_progress
self.save_date_time = save_date_time
self.save_best = save_best
self.current_best_score = 0
self.open_folder = open_folder
self.total_image_updates = (self.epochs * self.iterations) / self.save_every
self.encoded_texts = {
"max": [],
"min": []
}
# create img transform
self.clip_transform = create_clip_img_transform(224)
# create starting encoding
self.set_clip_encoding(text=text, img=img, encoding=encoding, text_min=text_min)
@property
def seed_suffix(self):
return f'.{self.seed}' if self.append_seed and exists(self.seed) else ''
def set_text(self, text):
self.set_clip_encoding(text = text)
def create_clip_encoding(self, text=None, img=None, encoding=None):
self.text = text
self.img = img
if encoding is not None:
encoding = encoding.cuda()
#elif self.create_story:
# encoding = self.update_story_encoding(epoch=0, iteration=1)
elif text is not None and img is not None:
encoding = (self.create_text_encoding(text) + self.create_img_encoding(img)) / 2
elif text is not None:
encoding = self.create_text_encoding(text)
elif img is not None:
encoding = self.create_img_encoding(img)
return encoding
def create_text_encoding(self, text):
tokenized_text = tokenize(text).cuda()
with torch.no_grad():
text_encoding = self.model.perceptor.encode_text(tokenized_text).detach()
return text_encoding
def create_img_encoding(self, img):
if isinstance(img, str):
img = Image.open(img)
normed_img = self.clip_transform(img).unsqueeze(0).cuda()
with torch.no_grad():
img_encoding = self.model.perceptor.encode_image(normed_img).detach()
return img_encoding
def encode_multiple_phrases(self, text, img=None, encoding=None, text_type="max"):
if text is not None and "|" in text:
self.encoded_texts[text_type] = [self.create_clip_encoding(text=prompt_min, img=img, encoding=encoding) for prompt_min in text.split("|")]
else:
self.encoded_texts[text_type] = [self.create_clip_encoding(text=text, img=img, encoding=encoding)]
def encode_max_and_min(self, text, img=None, encoding=None, text_min=""):
self.encode_multiple_phrases(text, img=img, encoding=encoding)
if text_min is not None and text_min != "":
self.encode_multiple_phrases(text_min, img=img, encoding=encoding, text_type="min")
def set_clip_encoding(self, text=None, img=None, encoding=None, text_min=""):
self.current_best_score = 0
self.text = text
self.text_min = text_min
if len(text_min) > 0:
full_text = text + "_wout_" + text_min[:255] if text is not None else "wout_" + text_min[:255]
else:
full_text = text
text_path = create_text_path(text=full_text, img=img, encoding=encoding)
if self.save_date_time:
text_path = datetime.now().strftime("%y%m%d-%H%M%S-") + text_path
self.text_path = text_path
if self.save_dir is not None:
self.filename = Path(f'./{self.save_dir}/{text_path}{self.seed_suffix}.png')
else:
self.filename = Path(f'./{text_path}{self.seed_suffix}.png')
self.encode_max_and_min(text, img=img, encoding=encoding, text_min=text_min) # Tokenize and encode each prompt
def reset(self):
self.model.reset()
self.model = self.model.cuda()
self.optimizer = Adam(self.model.model.latents.model.parameters(), self.lr)
def train_step(self, epoch, i, pbar=None):
total_loss = 0
for _ in range(self.gradient_accumulate_every):
out, losses = self.model(self.encoded_texts["max"], self.encoded_texts["min"])
loss = sum(losses) / self.gradient_accumulate_every
total_loss += loss
loss.backward()
self.optimizer.step()
self.model.model.latents.update()
self.optimizer.zero_grad()
if (i + 1) % self.save_every == 0:
with torch.no_grad():
self.model.model.latents.eval()
out, losses = self.model(self.encoded_texts["max"], self.encoded_texts["min"])
top_score, best = torch.topk(losses[2], k=1, largest=False)
image = self.model.model()[best].cpu()
self.model.model.latents.train()
print("datatype:", image.dtype, "\nfilepath:", str(self.filename), "\nshape:", image.shape)
save_image(image, str(self.filename))
if pbar is not None:
pbar.update(1)
else:
print(f'image updated at "./{str(self.filename)}"')
if self.save_progress:
total_iterations = epoch * self.iterations + i
num = total_iterations // self.save_every
if self.save_dir is not None:
save_image(image, Path(f'./{self.save_dir}/{self.text_path}.{num:04d}{self.seed_suffix}.png'))
else:
save_image(image, Path(f'./{self.text_path}.{num:04d}{self.seed_suffix}.png'))
if self.save_best and top_score.item() < self.current_best_score:
self.current_best_score = top_score.item()
if self.save_dir is not None:
save_image(image, Path(f'./{self.save_dir}/{self.text_path}{self.seed_suffix}.best.png'))
else:
save_image(image, Path(f'./{self.text_path}{self.seed_suffix}.best.png'))
return out, total_loss
def forward(self):
penalizing = ""
if len(self.text_min) > 0:
penalizing = f'penalizing "{self.text_min}"'
print(f'Imagining "{self.text_path}" {penalizing}...')
with torch.no_grad():
self.model(self.encoded_texts["max"][0]) # one warmup step due to issue with CLIP and CUDA
if self.open_folder:
open_folder('./')
self.open_folder = False
image_pbar = tqdm(total=self.total_image_updates, desc='image update', position=2, leave=True)
for epoch in trange(self.epochs, desc = ' epochs', position=0, leave=True):
pbar = trange(self.iterations, desc=' iteration', position=1, leave=True)
image_pbar.update(0)
for i in pbar:
out, loss = self.train_step(epoch, i, image_pbar)
pbar.set_description(f'loss: {loss.item():04.2f}')
if terminate:
print('detecting keyboard interrupt, gracefully exiting')
return
``` |
{
"source": "jonahmajumder/bookmarker",
"score": 3
} |
#### File: jonahmajumder/bookmarker/bookmarks.py
```python
from PyPDF2 import PdfFileWriter, PdfFileReader
from PyPDF2.generic import Destination
from PyPDF2.utils import PdfReadError
from PyQt5.Qt import QStandardItemModel, QStandardItem, Qt
import os
import json
from tempfile import NamedTemporaryFile
class BookmarkItem(QStandardItem):
def __init__(self, title='', page=None):
super().__init__()
self.setEditable(False)
self.setText(title)
self.setPage(page)
def page(self):
return self.data(Qt.UserRole)
def setPage(self, page):
self.setData(page, Qt.UserRole)
def toDict(self):
return {'text': self.text(), 'page': self.page(), 'children': []}
@staticmethod
def fromDict(itemDict):
return BookmarkItem(itemDict['text'], itemDict['page'])
class BookmarkModel(QStandardItemModel):
"""
subclass of QStandardItemModel to represent bookmark tree
"""
def __init__(self, pdffile):
super().__init__()
self.reader = None
self.writer = None
self.initFromPdfFile(pdffile)
def clear(self):
self.removeRows(0, self.rowCount())
def addBookmarkNodeFromDest(self, item, parent=None, lastBookmark=None):
if isinstance(item, Destination):
bookmark = BookmarkItem(item.title, self.reader.getDestinationPageNumber(item))
parent.appendRow(bookmark)
return bookmark
elif isinstance(item, list):
parent = lastBookmark
lastBookmark = None
for m in item:
lastBookmark = self.addBookmarkNodeFromDest(m, parent, lastBookmark)
def initFromPdfFile(self, filename):
self.clear()
infile = open(filename, 'rb')
self.reader = PdfFileReader(infile, strict=False)
box = self.reader.getPage(0).mediaBox
self.dimensions = [box[2] - box[0], box[3] - box[1]]
try:
self.addBookmarkNodeFromDest(self.reader.outlines, parent=self, lastBookmark=self.invisibleRootItem())
except PdfReadError:
pass
self.reader = None
infile.close()
def writeBookmarks(self, parentNode, parentBookmark=None):
for row in range(parentNode.rowCount()):
item = parentNode.child(row)
dest = self.writer.addBookmark(item.text(), item.page(), parentBookmark)
if item.hasChildren():
self.writeBookmarks(item, dest)
def writeToPdfFile(self, oldfilename, newfilename):
oldfile = open(oldfilename, 'rb')
reader = PdfFileReader(oldfile, strict=False)
self.writer = PdfFileWriter()
self.writer.appendPagesFromReader(reader)
self.writeBookmarks(self.invisibleRootItem())
# make new file as temp file regardless, then copy it to relevant file
# advantage -- works for both "save as" and "save"
with NamedTemporaryFile(delete=False) as temp:
with open(temp.name, 'wb') as newfile:
self.writer.write(newfile)
oldfile.close()
self.writer = None
os.replace(temp.name, os.path.abspath(newfilename))
def bookmarkDictionary(self, parentNode):
dictList = []
for row in range(parentNode.rowCount()):
item = parentNode.child(row)
itemDict = item.toDict()
itemDict['children'] = self.bookmarkDictionary(item) if item.hasChildren() else []
dictList.append(itemDict)
return dictList
def fullDictionary(self):
return self.bookmarkDictionary(self.invisibleRootItem())
def exportJsonBookmarks(self, filename):
with open(filename, 'w') as f:
json.dump(self.fullDictionary(), f, indent=2)
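# Example of the JSON written above (shape follows BookmarkItem.toDict; values illustrative):
# [
#   {"text": "Chapter 1", "page": 0, "children": [
#     {"text": "Section 1.1", "page": 2, "children": []}
#   ]}
# ]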
def initFromJson(self, filename, clear=True):
if clear:
self.clear()
with open(filename, 'r') as f:
d = json.load(f)
if isinstance(d, dict):
self.addBookmarkNodeFromDict(d, self.invisibleRootItem())
elif isinstance(d, list):
[self.addBookmarkNodeFromDict(dd, self.invisibleRootItem()) for dd in d]
def addBookmarkNodeFromDict(self, itemDict, parent):
node = BookmarkItem.fromDict(itemDict)
parent.appendRow(node)
for ch in itemDict['children']:
self.addBookmarkNodeFromDict(ch, node)
```
#### File: jonahmajumder/bookmarker/closetest.py
```python
import sys
from PyQt5.QtWidgets import (QApplication, QWidget, QMessageBox)
class MainWindow(QWidget):
def __init__(self):
super().__init__()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Window Close', 'Are you sure you want to close the window?',
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
print('Window closed')
else:
event.ignore()
if __name__ == '__main__':
app = QApplication(sys.argv)
demo = MainWindow()
demo.show()
sys.exit(app.exec_())
```
#### File: jonahmajumder/bookmarker/locations.py
```python
import sys
from pathlib import Path
from datetime import datetime
IS_BUNDLED = getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')
LOGFILE = 'log.txt'
def log_header():
header = '\n'
header += 'Log file initiated at {}.\n'.format(datetime.now().isoformat())
header += 50 * '-'
header += '\n\n'
return header
if IS_BUNDLED:
RELATIVE_PATH = Path(sys._MEIPASS).parent / 'Resources'
else:
RELATIVE_PATH = Path(__file__).parents[0]
# in Resource dir within app bundle
def ResourceFile(path):
return str(Path.cwd() / RELATIVE_PATH / path)
HOME = str(Path.home())
DOCUMENTS = str(Path.home() / 'Documents')
if IS_BUNDLED:
# set up app to write to logfile
with open(ResourceFile(LOGFILE), 'a') as file:
file.write(log_header())
sys.stdout = open(ResourceFile(LOGFILE), 'a')
sys.stderr = open(ResourceFile(LOGFILE), 'a')
``` |
{
"source": "jonahmajumder/pyglass",
"score": 2
} |
#### File: pyglass/quicklook/models.py
```python
from ..models import Exportable, ExportFormat
class QLExportable(Exportable):
''' Base class for any exportable QuickLook item '''
def __init__(self, filename):
self.filename = filename
super(QLExportable, self).__init__()
def __unicode__(self):
return '<QLExportable>'
class Page(QLExportable):
''' For multi-page files, e.g. if pdf preview '''
def __init__(self, filename, page_id):
self.id = page_id
super(Page, self).__init__(filename)
def export(self, export_format=ExportFormat.PNG):
pass
```
#### File: pyglass/pyglass/utils.py
```python
import logging
import os
# Library modules
from pxprocess import check_output
from pyunicode import safely_decode
logger = logging.getLogger(__name__)
def execute(cmd):
''' Call cmd and return output. return None if any exception occurs '''
try:
return safely_decode(check_output(cmd))
except Exception as e:
logger.warning("Couldn't execute cmd: %s.\nReason: %s" % (cmd, e))
return None
def unicode_or_none(dictionary, key):
if dictionary is None or key is None:
return None
return None if key not in dictionary or dictionary[key] is None else str(dictionary[key])
def extension(path_str):
''' Returns lowercased file extension for the path '''
return os.path.splitext(path_str)[1].lower()
def mimetype(path_str):
''' Returns the mimetype of the file at path_str. Depends on OS X's `file` util '''
return execute(['file', '--mime-type', '--brief', path_str]).strip().lower()
``` |
{
"source": "jonahmakowski/PyWrskp",
"score": 4
} |
#### File: src/alarm/time_only.py
```python
import datetime
def work(hour, mint, print_info=True):
hour = int(hour)
mint = int(mint)
now = datetime.datetime.now()
past_time = datetime.datetime(now.year, now.month, now.day, hour, mint)
past_time = past_time.strftime("%X")
while True:
now = datetime.datetime.now()
now = now.strftime("%X")
if now == past_time:
if print_info:
print('DING-DONG')
print('Time up!')
return 'DING-Dong'
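# Illustrative usage: work(7, 30) busy-waits until the wall clock reads 07:30,
# prints the alert, and returns 'DING-Dong'.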
```
#### File: src/other/animal_guesser.py
```python
import os
import json
try:
pyWrkspLoc = os.environ["PYWRKSP"]
except KeyError:
pyWrkspLoc = os.environ["HOME"] + input('Since you do not have the PYWRSKP env var '
'\nPlease enter the pwd for the pyWrskp repo not including the '
'"home" section')
class AnimalChooser:
def __init__(self, pywrskp):
self.name_1 = pywrskp + '/docs/txt-files/animal_chooser_options.txt'
self.name_2 = pywrskp + '/docs/txt-files/animal_chooser_atturbites.txt'
self.options = []
self.atturbites = []
self.load()
self.correct_atturbites = []
do = input('would you like to add or play?')
if do == 'play':
print('The current options for this game are:')
for item in self.options:
print(item['name'])
self.guess()
else:
self.add()
def guess(self):
for item in self.atturbites:
yes_or_no = input('is this animal/does it have {} (y/n)?'.format(item['name']))
item['y/n'] = yes_or_no
for item in self.atturbites:
if item['y/n'] == 'y':
self.correct_atturbites.append(item['name'])
chosen = False
for item in self.options:
item['info'] = sorted(item['info'])
self.correct_atturbites = sorted(self.correct_atturbites)
for item in self.options:
if item['info'] == self.correct_atturbites:
print('your animal is {}'.format(item['name']))
chosen = True
break
if not chosen:
print("This program can't figure out your animal; make sure it is on this list:")
for item in self.options:
print(item['name'])
'''print('debug info:')
print('self.correct_atturbites:')
print(self.correct_atturbites)
print('self.options:')
print(self.options)'''
def load(self):
try:
with open(self.name_1) as json_file:
self.options = json.load(json_file)
except FileNotFoundError:
print('This file does not exist (num1)')
exit(5)
try:
with open(self.name_2) as json_file:
self.atturbites = json.load(json_file)
except FileNotFoundError:
print('This file does not exist (num2)')
exit(5)
def add(self):
new_name = input('What is the name of this animal?')
new = {"name": new_name, "info": []}
new_attrbs = []
print('What are the attributes?')
while True:
attrb = input()
if attrb == '':
break
new_attrbs.append(attrb)
new["info"].append(attrb)
# only add attributes that aren't already known
existing = {atra['name'] for atra in self.atturbites}
for item in new_attrbs:
if item not in existing:
self.atturbites.append({'name': item, "y/n": ""})
self.options.append(new)
with open(self.name_1, 'w') as outfile:
json.dump(self.options, outfile)
with open(self.name_2, 'w') as outfile:
json.dump(self.atturbites, outfile)
game = AnimalChooser(pyWrkspLoc)
```
#### File: Other_from_2020-2021/class2020/class_2wrk.py
```python
import pygame
DIS_WIDTH = 1280
DIS_HEIGHT = 820
pygame.init()
screen = pygame.display.set_mode([DIS_WIDTH, DIS_HEIGHT])
pygame.display.set_caption("Paint")
buttonMinus = pygame.image.load('button_minus.png')
buttonPlus = pygame.image.load('button_plus.png')
keep_going = True
mouse_down = False
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
GREY = (131, 131, 131)
currentColour = RED
radius = 5
colors = [RED, GREEN, BLUE]
buttons = [buttonMinus, buttonPlus]
recWidth = int(DIS_WIDTH * 0.1)
recHeight = int(DIS_HEIGHT * 0.09)
button_size = int(recHeight / 2)
max_radius = int(recHeight / 2)
location_x = DIS_WIDTH - (max_radius * 4)
# click-detection rects for the +/- buttons drawn in menu()
minusRect = pygame.Rect(location_x, 0, button_size, button_size)
plusRect = pygame.Rect(location_x, button_size, button_size, button_size)
def menu():
global location_x
global button_size
global colors
topRectangle = pygame.Rect((0, 0), (DIS_WIDTH, recHeight))
pygame.draw.rect(screen, GREY, topRectangle)
pygame.draw.circle(screen, currentColour, (DIS_WIDTH - radius, radius), radius)
x = 0
for col in colors:
rectangle = pygame.Rect((x, 0), (recWidth, recHeight))
pygame.draw.rect(screen, col, rectangle)
x += recWidth
y = 0
for button in buttons:
plus_minus = pygame.transform.scale(button, (button_size, button_size))
plus_minus_rec = plus_minus.get_rect(topleft = (location_x, y))
screen.blit(plus_minus, plus_minus_rec)
y += button_size
pygame.display.update()
def check_color():
global colors
global recWidth
global currentColour
x = 0
spot = pygame.mouse.get_pos()
for col in colors:
rectangle = pygame.Rect((x, 0), (recWidth, recHeight))
x += recWidth
if rectangle.collidepoint(spot):
return 'color'
y = 0
for button in buttons:
plus_minus = pygame.transform.scale(button, (button_size, button_size))
plus_minus_rec = plus_minus.get_rect(topleft = (location_x, y))
y += button_size
if plus_minus_rec.collidepoint(spot):
return 'radius'
return False
def check():
global colors
global recWidth
global currentColour
global recHeight
global radius
x = 0
spot = pygame.mouse.get_pos()
for col in colors:
rectangle = pygame.Rect((x, 0), (recWidth, recHeight))
x += recWidth
if rectangle.collidepoint(spot):
currentColour = col
increase = 5
while keep_going:
for event in pygame.event.get():
if event.type == pygame.QUIT:
keep_going = False
elif event.type == pygame.MOUSEBUTTONDOWN:
spot = pygame.mouse.get_pos()
if minusRect.collidepoint(spot):
if radius > increase:
radius -= increase
if plusRect.collidepoint(spot):
if radius < max_radius:
radius += increase
if radius > max_radius:
radius -= increase
else:
mouse_down = True
elif event.type == pygame.MOUSEBUTTONUP:
mouse_down = False
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE:
screen.fill(BLACK)
if event.key == pygame.K_r:
radius = 5
if event.key == pygame.K_q:
if radius < max_radius:
radius += increase
if radius > max_radius:
radius -= increase
if event.key == pygame.K_a:
if radius > increase:
radius -= increase
if event.key == pygame.K_z:
screen.fill(currentColour)
if mouse_down:
# while the mouse button is held, paint at the current mouse position
spot = pygame.mouse.get_pos()
if not check_color():
# if it's not within a menu button, place a circle there
pygame.draw.circle(screen, currentColour, spot, radius)
pygame.display.update()
else:
menu()
pygame.quit()
```
#### File: Other_from_2020-2021/class2020/mm2.py
```python
import pygame
print('Based on Class_one1.py by Jonas, Apr 4, 2021; modified by <NAME>.')
# define colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
PURPLE = (251, 0, 255)
YELLOW = (255, 247, 0)
TEAL = (0, 255, 255)
ORANGE = (255, 196, 0)
LIME = (132, 255, 0)
box_size = 50 # size (in pixels) of menu boxes
h_menu = box_size # height of the menu bar (at top of the display)
colors = [BLACK, WHITE, RED, BLUE, GREEN, PURPLE, YELLOW, TEAL, ORANGE, LIME]
n_colors = len(colors)
pen_sizes = [5, 10, 15, 20, 25]
n_sizes = len(pen_sizes)
x_pixels = 1375
y_pixels = 750
currentColour = BLUE # initial background color
radius = 10 # initial radius of the circle
def mk_menu(): # display menu boxes (colors and pen-sizes)
global currentColour
screen.fill(currentColour)
x_cur = 0
for col in colors: # menu color boxes
rectangle = pygame.Rect((x_cur, 0), (box_size, box_size))
pygame.draw.rect(screen, col, rectangle)
x_cur += box_size
for pen in pen_sizes: # menu pen-size boxes
rectangle = pygame.Rect((x_cur, 0), (box_size, box_size))
pygame.draw.rect(screen, BLACK, rectangle)
pygame.draw.circle(screen, WHITE, (x_cur + box_size/2, box_size/2), pen)
x_cur += box_size
rectangle = pygame.Rect((x_cur, 0), (x_pixels, box_size))
pygame.draw.rect(screen, WHITE, rectangle)
pygame.display.update()
def mk_change(x_pos): # change either color or pen_size
global currentColour
global radius
x_org = x_pos
colors_menu = n_colors * box_size
if x_pos <= colors_menu:
col_pos = int(x_pos / box_size)
currentColour = colors[col_pos]
# print('New color, pos = ' + str(col_pos))
return
pens_menu = n_sizes * box_size
x_pos -= colors_menu
if x_pos <= pens_menu:
pen_pos = int(x_pos / box_size)
radius = pen_sizes[pen_pos]
# print('New radius, pos = ' + str(pen_pos))
else:
print('Ignore menu position: ' + str(x_org))
pygame.init()
screen = pygame.display.set_mode([x_pixels, y_pixels])
pygame.display.set_caption(
"Click the color-square to change the current color; the space-bar changes the background color")
pygame.display.update()
mk_menu()
keep_going = True
mouse_down = False
while keep_going:
for event in pygame.event.get():
if event.type == pygame.QUIT: # exit
keep_going = False
elif event.type == pygame.MOUSEBUTTONUP: # do nothing
mouse_down = False
elif event.type == pygame.MOUSEBUTTONDOWN:
spot = pygame.mouse.get_pos() # get the current position of the mouse
x = spot[0]
y = spot[1]
if y <= box_size: # in menu area
mk_change(x) # change either color or pen-size
else:
mouse_down = True
elif event.type == pygame.KEYDOWN: # keyboard events
if event.key == pygame.K_SPACE: # only the space-bar handled
screen.fill(currentColour)
mk_menu()
pygame.display.update()
if mouse_down:
spot = pygame.mouse.get_pos() # get the current position of the mouse
# print('Mouse position: ' + str(spot[0]) + ', ' + str(spot[1]))
if spot[1] > box_size:
margin = spot[1] - box_size
paint_radius = radius
if margin < radius:
paint_radius = margin
pygame.draw.circle(screen, currentColour, spot, paint_radius)
pygame.display.update()
else:
print('Painting in the menu-bar is suppressed.')
pygame.quit()
```
#### File: Other_from_2020-2021/classes/class_7.py
```python
import requests
import matplotlib.pyplot as plt
import dateutil.parser
class Weather:
def __init__(self, street, city, state, zipcode, loc_name, hour):
self.street = street
self.city = city
self.state = state
self.zipcode = zipcode
self.loc_name = loc_name
# print(self.zipcode)
if hour == 'd':
self.getForecastdaily()
self.getTempsdaily()
elif hour == 'h':
self.getForecasthourly()
self.getTempsHourly()
def getForecasthourly(self):
geocode = requests.get("https://geocoding.geo.census.gov/geocoder/locations/address?street={}&city={}&state={}&zip={}&benchmark=4&format=json".format(self.street, self.city, self.state, self.zipcode))
#print(geocode.text)
coordinates = geocode.json()['result']['addressMatches'][0]['coordinates']
gridpoints = requests.get('https://api.weather.gov/points/{},{}'.format(coordinates['y'],coordinates['x']))
#print(gridpoints.text)
self.forecast = requests.get(gridpoints.json()['properties']['forecastHourly'])
# print(self.forecast.text) # uncomment to print raw forecast info
def getTempsHourly(self):
#print(self.forecast.text)
time = str(self.forecast.json()['properties']['periods'][0]['startTime'])
time = time[-5:]
hour = int(time[:-3])
tick = 0
self.times, self.temp = [], []
for hr in self.forecast.json()['properties']['periods']:
time = str(hour) + ':00'
self.times.append(time)
self.temp.append(hr['temperature'])
hour += 1
if hour == 25:
hour = 1
tick += 1
if tick == 24:
break
def getForecastdaily(self):
geocode = requests.get("https://geocoding.geo.census.gov/geocoder/locations/address?street={}&city={}&state={}&zip={}&benchmark=4&format=json".format(self.street, self.city, self.state, self.zipcode))
#print(geocode.text)
coordinates = geocode.json()['result']['addressMatches'][0]['coordinates']
gridpoints = requests.get('https://api.weather.gov/points/{},{}'.format(coordinates['y'],coordinates['x']))
self.forecast = requests.get(gridpoints.json()['properties']['forecast'])
# print(self.forecast.text) # uncomment to print raw forecast info
def getTempsdaily(self):
self.timesDay, self.tempsDay, self.timesNight, self.tempsNight = [], [], [], []
for days in self.forecast.json()['properties']['periods']:
if days['isDaytime']:
self.timesDay.append(dateutil.parser.parse(days['startTime']))
self.tempsDay.append(days['temperature'])
else:
self.timesNight.append(dateutil.parser.parse(days['startTime']))
self.tempsNight.append(days['temperature'])
def txt_display_forecast(self):
for days in self.forecast.json()['properties']['periods']:
print('{}:\n{}{}, {}\n\n'.format(days['name'], days['temperature'], days['temperatureUnit'], days['shortForecast']))
def plt_display_forecast(item, hour):
if hour == 'd':
for lis in item:
plt.plot(lis.timesDay, lis.tempsDay, 'o-', label=lis.loc_name + ' day')
plt.plot(lis.timesNight, lis.tempsNight,'o-', label=lis.loc_name + ' night')
plt.title('temp for next seven days')
else:
for lis in item:
plt.plot(lis.times, lis.temp, 'o-', label=lis.loc_name)
plt.title('temp for 24 hours')
plt.gcf().autofmt_xdate()
plt.xlabel('days')
plt.ylabel('temp (in F)')
plt.grid()
plt.legend()
plt.show()
# get the weather of the white house
ny_near_street = '1+Scott+Ave'
ny_near_city = 'Youngstown'
ny_near_state = 'NY'
ny_near_zipcode = "14174"
ny_street = '20+W+34th+st'
ny_city = 'New York'
ny_state = 'NY'
ny_zipcode = '10001'
dc_street = "1600+Pennsylvania+Avenue+NW"
dc_city = "Washington"
dc_state = "DC"
dc_zipcode = "20500"
other_street = (input('What is the number of your loc ') + '+' + input('What is the road name ') + '+' + input('What is the dr, ave, rd, etc '))
other_city = input('What is the city name? ')
other_city = other_city.replace(' ', '+') # URL-encode spaces for the geocoding query
other_state = input('What is the name of the state ')
while True:
try:
other_zipcode = int(input('What is the zipcode '))
break
except ValueError:
print('That is not a number')
other_name = input('What is the name of this place? ')
hour = input('Would you like hourly or daily? (h/d) ')
NY_near = Weather(ny_near_street, ny_near_city, ny_near_state, ny_near_zipcode, 'NY Near', hour)
DC = Weather(dc_street, dc_city, dc_state, dc_zipcode, 'DC', hour)
NY = Weather(ny_street, ny_city, ny_state, ny_zipcode, 'NY', hour)
try:
other = Weather(other_street, other_city, other_state, other_zipcode, other_name, hour)
locs = [NY, DC, NY_near, other]
other_loc = True
except Exception:
print('This address is invalid, continuing without the other address')
locs = [NY, DC, NY_near]
other_loc = False
chart = input('would you like a chart? (y/n) ')
if chart == 'y':
while True:
loc = input('What loc would you like to print? (all/DC/NY Near/NY, or ' + other_name + ' if it worked) ')
if loc == 'all':
plt_display_forecast(locs, hour)
break
elif loc == 'DC':
plt_display_forecast([DC], hour)
break
elif loc == 'NY':
plt_display_forecast([NY], hour)
break
elif loc == 'NY Near':
plt_display_forecast([NY_near], hour)
break
if loc == other_name:
if other_loc:
plt_display_forecast([other], hour)
break
else:
print('sorry ' + other_name + ' is not working')
else:
while True:
loc = input('What loc would you like to print? (all/DC/NY Near/NY, or ' + other_name + ' if it worked) ')
if loc == 'NY':
NY.txt_display_forecast()
break
elif loc == 'DC':
DC.txt_display_forecast()
break
elif loc == 'NY Near':
NY_near.txt_display_forecast()
break
if loc == other_name:
if other_loc:
other.txt_display_forecast()
break
else:
print('sorry ' + other_name + ' is not working')
```
#### File: Other_from_2020-2021/home/search.py
```python
import tkinter as tk
class search(tk.Frame):
def __init__(self, items, master=None):
super().__init__(master)
self.master = master
self.items = items
self.pack()
self.create()
def create(self):
self.entry1 = tk.Entry(self.master, justify='right')
self.entry1.place(x=0, y=100, width=1000, height=25)
```
#### File: Other_from_2020-2021/home/smartness_test.py
```python
a = []
print('WELCOME TO MY SMARTNESS TEST')
def q(qu, ae):
# compare numerically so typed answers match the computed (possibly float) result
qe = input(qu + '\n')
try:
return float(qe) == float(ae)
except ValueError:
return False
c = '' # question text
d = '' # correct answer
def q_idea():
from random import randint as r
num = r(2, 3)
nums = []
if num == 2:
num_1 = r(10, 100)
num_2 = r(50, 150)
nums.append(num_1)
nums.append(num_2)
if num == 3:
num_1 = r(10, 100)
num_2 = r(50, 150)
num_3 = r(100, 200)
nums.append(num_1)
nums.append(num_2)
nums.append(num_3)
def question_def(opr):
if len(nums) == 2:
questionr = ('What is ' + str(nums[0]) + opr + str(nums[1]) + '?')
if len(nums) == 3:
questionr = ('What is ' + str(nums[0]) + opr + str(nums[1]) + opr + str(nums[2]) + '?')
return questionr
op = r(1, 4)
if op == 1:
question = question_def(' + ')
elif op == 2:
question = question_def(' - ')
elif op == 3:
question = question_def(' * ')
elif op == 4:
question = question_def(' / ')
global c
global d
c = question
if len(nums) == 2:
if op == 1:
d = nums[0] + nums[1]
if op == 2:
d = nums[0] - nums[1]
if op == 3:
d = nums[0] * nums[1]
if op == 4:
d = nums[0] / nums[1]
elif len(nums) == 3:
if op == 1:
d = nums[0] + nums[1] + nums[2]
if op == 2:
d = nums[0] - nums[1] - nums[2]
if op == 3:
d = nums[0] * nums[1] * nums[2]
if op == 4:
d = nums[0] / nums[1] / nums[2]
for i in range(9):
q_idea()
a.append(q(c, d))
i = 0
for item in a:
if item:
i += 1
rank = 'terrible.'
if i >= 1:
rank = 'bad.'
if i >= 3:
rank = 'ok.'
if i >= 5:
rank = 'pretty good.'
if i >= 7:
rank = 'good.'
if i >= 8:
rank = 'great!'
if i >= 9:
rank = 'Outstanding!!'
print('you ranked ' + rank)
```
#### File: Other_from_2020-2021/home/what_is_your_name.py
```python
import sys
import os
try:
pyWrkspLoc = os.environ["PYWRKSP"]
except KeyError:
pyWrkspLoc = os.environ["HOME"] + input('Since you do not have the PYWRKSP env var, '
'\nplease enter the path to the pyWrskp repo, not including the '
'"home" section: ')
class Name:
def __init__(self, name, pyWrskp):
self.name = name
self.pyWrskp = pyWrskp
self.fun_stuff()
def hello_world(self):
print('Hello World')
print('Your name is {}!'.format(self.name))
def lola_is_the_best(self):
for i in range(999):
print('Lola is the best')
def play_game(self):
# renamed from `name` to avoid clashing with the self.name attribute set in __init__
sys.path.append(self.pyWrskp + '/src/game')
from game import Game
g = Game()
def fun_stuff(self):
option = input('What do you want to do {}?'.format(self.name))
if option == 'hello world':
self.hello_world()
elif option == 'lola is the best':
self.lola_is_the_best()
elif option == 'game':
self.play_game()
n = Name('Jonah', pyWrkspLoc)
```
#### File: src/other/shopping_list.py
```python
import json
import os
try:
pyWrkspLoc = os.environ["PYWRKSP"]
except KeyError:
pyWrkspLoc = os.environ["HOME"] + input('Since you do not have the PYWRKSP env var, '
'\nplease enter the path to the pyWrskp repo, not including the '
'"home" section: ')
class ShoppingList:
def __init__(self, py_wrskp):
self.name = py_wrskp + '/docs/txt-files/shopping_list.txt'
self.list = self.load()
print('the current list is:')
self.show()
t = input('what would you like to do?')
if t == 'add':
self.add()
print('The list is now:')
self.show()
elif t == 'rm':
self.rm()
print('The list is now:')
self.show()
self.save()
def load(self):
try:
with open(self.name) as json_file:
j = json.load(json_file)
except FileNotFoundError:
print('This file does not exist')
exit(5)
return j
def add(self):
item = input('What is the name of the object you want to add to your list?')
self.list.append(item)
def rm(self):
item = input('What is the name of the item you would like to remove? Make sure this is right')
if item == 'all':
self.list = []
else:
for i in range(len(self.list)):
if self.list[i] == item:
del self.list[i]
break
def show(self):
for item in self.list:
print(item)
def save(self):
with open(self.name, 'w') as outfile:
json.dump(self.list, outfile)
if __name__ == "__main__":
shopping_list = ShoppingList(pyWrkspLoc)
```
#### File: src/other/team_maker.py
```python
import os
from random import randint
import json
try:
pyWrkspLoc = os.environ["PYWRKSP"]
except KeyError:
pyWrkspLoc = os.environ["HOME"] + input('Since you do not have the PYWRKSP env var, '
'\nplease enter the path to the pyWrskp repo, not including the '
'"home" section: ')
class TeamMaker:
def __init__(self, loc, t=None, d=None, pr=True):
self.name = loc + '/docs/txt-files/team_maker_save.txt'
change_t = False
if t is None:
change_t = True
while True:
if change_t:
t = self.ask_t()
if t == '2 teams':
self.two_teams()
if pr:
self.show()
break
elif t == '4 teams':
self.four_teams()
if pr:
self.show()
break
elif t == 'partners':
self.partners()
if pr:
self.show()
break
elif t == '2 teams + captions':
self.two_teams()
self.chose_caption()
if pr:
self.show()
break
elif t == '4 teams + captions':
self.four_teams()
self.chose_caption()
if pr:
self.show()
break
elif t == 'partners + captions':
self.partners()
self.chose_caption()
if pr:
self.show()
break
else:
if change_t is False:
print("As this option can't be changed, ending program")
exit()
print('This value is not allowed, please try again')
print('The options are:')
print('"2 teams"')
print('"4 teams"')
print('"partners"')
print('or if you add "+ captions" to any of them you will get one captain per team')
print('asking again\n\n')
if d is None:
d = self.ask_l()
self.d = d
self.teams = []
def ask_t(self):
t = input('What type would you like?\n')
return t
def ask_l(self):
d = []
load = input('would you like to load the list? (y/n)')
if load == 'n':
print("Enter a list of the people's names if nothing is entered, the list will stop, you must include "
"more than one name")
while True:
l_add = input('')
if l_add == '':
if len(d) != 1:
break
elif l_add != '':
d.append(l_add)
save = input('would you like to save this list? (y/n)')
if save == 'y':
self.save_list(d)
elif load == 'y':
d = self.load()
else:
print('{} is not an allowed option'.format(load))
exit(404)
return d
def two_teams(self):
team_1 = []
team_2 = []
while len(self.d) > 1:
person1 = randint(0, len(self.d) - 1)
team_1.append(self.d[person1])
del self.d[person1]
person2 = randint(0, len(self.d) - 1)
team_2.append(self.d[person2])
del self.d[person2]
if len(self.d) == 1:
print('you have an uneven amount, adding {} to team 1'.format(self.d[0]))
team_1.append(self.d[0])
self.teams.append(team_1)
self.teams.append(team_2)
def four_teams(self):
team_1 = []
team_2 = []
team_3 = []
team_4 = []
while len(self.d) > 3:
person1 = randint(0, len(self.d) - 1)
team_1.append(self.d[person1])
del self.d[person1]
person2 = randint(0, len(self.d) - 1)
team_2.append(self.d[person2])
del self.d[person2]
person3 = randint(0, len(self.d) - 1)
team_3.append(self.d[person3])
del self.d[person3]
person4 = randint(0, len(self.d) - 1)
team_4.append(self.d[person4])
del self.d[person4]
if len(self.d) == 1:
team_1.append(self.d[0])
elif len(self.d) == 2:
team_1.append(self.d[0])
team_2.append(self.d[1])
elif len(self.d) == 3:
team_1.append(self.d[0])
team_2.append(self.d[1])
team_3.append(self.d[2])
self.teams.append(team_1)
self.teams.append(team_2)
self.teams.append(team_3)
self.teams.append(team_4)
def partners(self):
while len(self.d) >= 2:
person1 = randint(0, len(self.d) - 1)
person_1_name = self.d[person1]
del self.d[person1]
person2 = randint(0, len(self.d) - 1)
person_2_name = self.d[person2]
del self.d[person2]
self.teams.append([person_1_name, person_2_name])
if len(self.d) == 1:
print('You have an uneven amount of people')
print('I am making a group of three')
self.teams[0].append(self.d[0])
def chose_caption(self):
for item in self.teams:
caption_num = randint(0, len(item) - 1)
caption_name = item[caption_num]
del item[caption_num]
item.append(caption_name + ' is Captain of the team')
def load(self):
try:
with open(self.name) as json_file:
j = json.load(json_file)
except FileNotFoundError:
print('This file, where the save is does not exist, to use this program make a file at {}.'
.format(self.name))
exit(5)
return j
def save_list(self, d):
with open(self.name, 'w') as outfile:
json.dump(d, outfile)
def show(self):
team_num = 1
for item_large in self.teams:
print('Team {}'.format(team_num))
for item in item_large:
print(item)
print('\n')
team_num += 1
if __name__ == "__main__":
teams = TeamMaker(pyWrkspLoc)
```
#### File: src/other/txt_editor.py
```python
class TxtReader:
def __init__(self):
self.file_loc = input('Enter the path for this .txt file\nOr enter create for a new file')
if self.file_loc == 'create':
self.file_loc = input('What is the path for the new file you want to make')
self.write('', self.file_loc)
print('An empty file should be created at that loc.')
self.mainloop()
def mainloop(self):
while True:
do = input('What do you want to do?')
if do == 'wipe & write' or do == 'w&w':
self.wipe_and_write()
break
elif do == 'add' or do == 'a':
self.add()
break
elif do == 'read' or do == 'r':
self.show()
break
else:
print('{} is not an option'.format(do))
print('Here are the options:')
print('\t"wipe & write"')
print('\t\t"w&w"')
print('\t"add"')
print('\t\t"a"')
print('\t"read"')
print('\t\t"r"')
def show(self):
write = self.read(self.file_loc)
print('The current file is:')
print(write)
def add(self):
write = self.read(self.file_loc)
print('The current file is:')
print(write)
write_add = input('What do you wish to add? (use "\\n" for new lines)\n')
write_all = write + write_add
self.write(write_all, self.file_loc)
def wipe_and_write(self):
write = input('What do you wish to write?\n')
self.write(write, self.file_loc)
def read(self, loc):
with open(loc, 'r') as inFile:
txt = inFile.read()
return txt
def write(self, item, loc):
with open(loc, 'w') as outFile:
outFile.write(item)
txt_reader = TxtReader()
```
#### File: src/super_safe_note_sender/sender.py
```python
import sys
import json
import random
import os
sys.path.append('../coder-decoder')
from coder import CoderDecoder as Coder
'''
Need to pass a message to your friends?
Don't want anyone else to see it?
Use this program!
When you run the "create" option, this program creates a message.txt file; if your friend has
the program, they can decode the message.
'''
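# Illustrative workflow (assumes both parties have this script and coder.py on hand):
#   run `python sender.py` and answer "create" -> writes an encoded message.txt
#   run `python sender.py` and answer "read"   -> prompts for the password and decodes it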
class Sender:
def __init__(self):
self.remote_coder = Coder(print_info=False)
self.remote_coder_2 = Coder(print_info=False)
self.remote_coder_3 = Coder(print_info=False)
self.remote_coder_4 = Coder(print_info=False)
self.remote_coder_5 = Coder(print_info=False)
def create_note(self):
print('What would you like the password to be?')
password = input('The person who decodes this needs to know it')
message = input('What is the message that you would like to send?')
key = random.randint(0, len(self.remote_coder.abcs) - 1)
key_for_the_key = random.randint(0, len(self.remote_coder.abcs) - 1)
destroy = input('Would you like the file to be destroyed after reading?\n'
'It will be destroyed either way if the password is entered wrong\n'
'y/n\n')
self.remote_coder.add_vars(message, key)
message = self.remote_coder.code()
self.remote_coder_2.add_vars(password, key)
password = self.remote_coder_2.code()
self.remote_coder_5.add_vars(destroy, key)
destroy = self.remote_coder_5.code()
self.remote_coder_3.add_vars(str(key), key_for_the_key)
key = self.remote_coder_3.code()
self.remote_coder_4.add_vars(str(key_for_the_key), 15)
key_for_the_key = self.remote_coder_4.code()
items = []
for i in range(5):
item = ''
for c in range(random.randint(6, 20)):
new_item = '{'
# re-draw until the character is not a bracket, so the decoy strings stay bracket-free
while new_item in '{}[]':
new_item = self.remote_coder.abcs[random.randint(0, len(self.remote_coder.abcs) - 1)]
item += new_item
items.append(item)
save_dic = {'dshaidsh': items[0],
'asuydhausdhuashd': password,
'shadiufad': items[1],
'sdifhuegtsydftyas': message,
'g': items[2],
'asdyatsdftras': key,
'asd7r8ushdfuhja': items[3],
'd': destroy,
'fjgishuagsdiufji': items[4],
'gjfosjodjif': key_for_the_key}
with open('message.txt', 'w') as outfile:
json.dump(save_dic, outfile)
def read_note(self):
try:
with open('message.txt') as json_file:
dic = json.load(json_file)
except FileNotFoundError:
print('There is no file like this (make sure it is called message.txt)')
exit(404)
self.remote_coder_4.add_vars(dic['gjfosjodjif'], 15)
key_for_the_key = int(self.remote_coder_4.decode())
self.remote_coder_3.add_vars(dic['asdyatsdftras'], key_for_the_key)
key = int(self.remote_coder_3.decode())
password = input('What is the password?')
self.remote_coder_2.add_vars(dic['asuydhausdhuashd'], key)
password_check = self.remote_coder_2.decode()
if password != password_check:
print('password incorrect deleting file')
os.remove("message.txt")
exit(500)
self.remote_coder.add_vars(dic['sdifhuegtsydftyas'], key)
message = self.remote_coder.decode()
self.remote_coder_5.add_vars(dic['d'], key)
destroy = self.remote_coder_5.decode()
print('The message in this file is:')
print(message)
if destroy == 'y':
print('destroying file')
os.remove('message.txt')
else:
print('The person who sent you this .txt file has decided that it is not necessary to delete the file,')
print('Though you may do so if you want')
if __name__ == '__main__':
sender = Sender()
do = input('What do you wish to do?')
if do == 'create':
sender.create_note()
elif do == 'read':
sender.read_note()
``` |
{
"source": "jonahobw/shrinkbench",
"score": 2
} |
#### File: shrinkbench/models/__init__.py
```python
import os
import pathlib
from .head import replace_head
from .mnistnet import MnistNet
from .cifar_resnet import resnet20, resnet32, resnet44, resnet56, resnet110, resnet1202
from .cifar_vgg import vgg_bn_drop, vgg_bn_drop_100
def model_args(model: str) -> {}:
"""Arguments for torchvision.model constructors"""
args = {"googlenet": {"aux_logits": False}}
if model in args.keys():
return args[model]
return {}
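# Illustrative usage (assumes torchvision is installed; googlenet shown as an example):
#   import torchvision.models
#   model = torchvision.models.googlenet(**model_args("googlenet"))  # disables aux_logits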
```
#### File: shrinkbench/strategies/adversarial_magnitude.py
```python
import copy
from .magnitude import *
from ..pruning import (LayerPruning,
VisionPruning,
GradientMixin,
ActivationMixin,
AdversarialPruning,
AdversarialGradientMixin,
)
from .utils import (fraction_threshold,
fraction_mask,
map_importances,
flatten_importances,
importance_masks,
activation_importance,
map_zeros
)
class GreedyPGD(AdversarialGradientMixin, AdversarialPruning):
default_pgd_args = {"eps": 2 / 255, "eps_iter": 0.001, "nb_iter": 10, "norm": np.inf}
def __init__(self, model, dataloader, attack_kwargs, compression=1, device=None, debug=None):
attack_params = copy.deepcopy(self.default_pgd_args)
attack_params.update(attack_kwargs)
super().__init__(model=model, attack_name='pgd', dataloader=dataloader, attack_kwargs=attack_params, compression=compression, device=device, debug=debug)
def model_masks(self, prunable=None):
raise NotImplementedError("Class GreedyPGD is not a pruning method, it is inherited by other pruning "
"methods.")
class GreedyPGDGlobalMagGrad(GreedyPGD):
def model_masks(self):
"""Similar to GlobalMagGrad model_masks()"""
params = self.params()
grads = self.param_gradients(dataloader=self.dl, attack=self.attack, device=self.device, batches=self.debug)
# prune only the highest gradients wrt the loss
importances = {mod:
{p: grads[mod][p]
for p in mod_params}
for mod, mod_params in params.items()}
flat_importances = flatten_importances(importances)
threshold = fraction_threshold(flat_importances, self.fraction, largest=True)
if threshold == 0:
# there are too many 0 values in the tensor. These 0 values need to be able
# to be ranked. To do this while maintaining the previous order of the tensor,
# map 0 values to unique values
importances = {mod:
{p: map_zeros(grads[mod][p])
for p in mod_params}
for mod, mod_params in params.items()}
flat_importances = flatten_importances(importances)
threshold = fraction_threshold(flat_importances, self.fraction, largest=True)
return importance_masks(importances, threshold, largest=True, absolute=False)
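# Illustrative intent of map_zeros (helper imported from .utils): exact-zero importances
# are remapped to small unique values so fraction_threshold can still rank them,
# e.g. something like [0., 0., 0.3] -> [eps1, eps2, 0.3] with eps1 != eps2.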
class GreedyPGDGlobalMagGrad_param(GreedyPGD):
def model_masks(self):
"""Similar to GlobalMagGrad model_masks()"""
params = self.params()
grads = self.param_gradients(dataloader=self.dl, attack=self.attack, device=self.device, batches=self.debug)
# prune the highest gradient*parameter products
importances = {mod:
{p: params[mod][p] * grads[mod][p]
for p in mod_params}
for mod, mod_params in params.items()}
flat_importances = flatten_importances(importances)
threshold = fraction_threshold(flat_importances, self.fraction, largest=True)
if threshold == 0:
# there are too many 0 values in the tensor. These 0 values need to be able
# to be ranked. To do this while maintaining the previous order of the tensor,
# map 0 values to unique values
importances = {mod:
{p: map_zeros(params[mod][p] * grads[mod][p])
for p in mod_params}
for mod, mod_params in params.items()}
flat_importances = flatten_importances(importances)
threshold = fraction_threshold(flat_importances, self.fraction, largest=True)
return importance_masks(importances, threshold, largest=True, absolute=False)
class GreedyPGDLayerMagGrad(LayerPruning, GreedyPGD):
def layer_masks(self, module):
params = self.module_params(module)
grads = self.module_param_gradients(module, dataloader=self.dl, attack=self.attack, device=self.device, batches=self.debug)
importances = {param: np.abs(grads[param]) for param, value in params.items()}
masks = {param: fraction_mask(importances[param], self.fraction, largest=True, absolute=False)
for param, value in params.items() if value is not None}
return masks
``` |
{
"source": "Jonahowns/oxTorch_prototype",
"score": 2
} |
#### File: oxTorch_prototype/cgdms/modules.py
```python
import torch
import numpy as np
import torch.nn.functional as F
from box import Box
from force import ForceField
class Thermostat(torch.nn.Module):
def __init__(self, SimObj, type, timestep, temperature, device="cpu", **thrmst_params):
super().__init__()
if type == "vel":
self.type = type
self.apply = self.apply_vel
necessary_parameters = ["thermostat_cnst"]
elif type == "no_vel":
self.type = type
self.apply = self.apply_no_vel
necessary_parameters = ["thermostat_cnst"]
else:
print(f"Type {type} is not supported")
self.device = device
self.temperature = temperature
self.timestep = timestep
self.therm_params = {}
for param, val in thrmst_params.items():
self.therm_params[param] = val
self.sim = SimObj
def apply_vel(self):
# randomly re-draw velocities for a fraction of particles (Andersen-style thermostat)
thermostat_prob = self.timestep / self.therm_params["thermostat_cnst"]
rndm_numbers = torch.rand(self.sim.vels.shape[0], device=self.device) < thermostat_prob
self.sim.vels[rndm_numbers] = torch.randn(3, device=self.device) * self.temperature
self.sim.vels[rndm_numbers] /= self.sim.masses[rndm_numbers].unsqueeze(-1)
def apply_no_vel(self):
# for the velocity-free integrator, perturb the previous coordinates instead
# (follows the commented reference scheme below)
thermostat_prob = self.timestep / self.therm_params["thermostat_cnst"]
rndm_numbers = torch.rand(self.sim.coords.shape[0], device=self.device) < thermostat_prob
new_diff = torch.randn(3, device=self.device) * self.temperature * self.timestep
self.sim.coords_last[rndm_numbers] = self.sim.coords[rndm_numbers] - new_diff
# for ai in range(n_atoms):
# if torch.random(vels)
# if random() < thermostat_prob:
#
# # Actually this should be divided by the mass
# new_vel = torch.randn(3, device=device) * temperature
# vels[0, ai] = new_vel
# elif self.type == "no_vel":
# new_diff = torch.randn(3, device=device) * temperature * self.timestep
# coords_last[0, ai] = coords[0, ai] - new_diff
class Integrator(torch.nn.Module):
def __init__(self, Simobj, type, timestep, temperature, masses, device="cpu", otherparams=None):
super().__init__()
supported_types = ['vel', 'no_vel', 'langevin', 'langevin_simple']
if type in supported_types:
self.type = type
else:
print(f"Integrator type {type} not supported")
self.sim = Simobj # lets us access the Simulation Tensors
self.otherparams = otherparams if otherparams is not None else {}
if self.type == "vel":
self.first_step = self.first_step_vel
self.second_step = self.second_step_vel
elif self.type == "no_vel":
self.first_step = None
self.second_step = self.second_step_no_vel
elif self.type == "langevin":
self.first_step = self.first_step_langevin
self.second_step = self.second_step_langevin
elif self.type == "langevin_simple":
self.first_step = self.first_step_langevin_simple
self.second_step = self.second_step_langevin_simple
self.temp = temperature
self.masses = masses
self.timestep = timestep
self.device = device
def first_step_vel(self):
self.sim.coords = self.sim.coords + self.sim.vels * self.timestep + 0.5 * self.sim.accs_last * self.timestep * self.timestep
def first_step_langevin(self):
alpha, twokbT = self.otherparams['thermostat_const'], self.otherparams['temperature']
beta = np.sqrt(twokbT * alpha * self.timestep) * torch.randn(self.sim.vels.shape, device=self.device)
b = 1.0 / (1.0 + (alpha * self.timestep) / (2 * self.masses.unsqueeze(2)))
# stash the friction constant and per-step noise for use in second_step_langevin
self._alpha, self._beta = alpha, beta
self.sim.coords_last = self.sim.coords
self.sim.coords = self.sim.coords + b * self.timestep * self.sim.vels + 0.5 * b * (self.timestep ** 2) * self.sim.accs_last + 0.5 * b * self.timestep * beta / self.sim.masses.unsqueeze(2)
def first_step_langevin_simple(self):
self.sim.coords = self.sim.coords + self.sim.vels * self.timestep + 0.5 * self.sim.accs_last * self.timestep * self.timestep
def second_step_vel(self):
self.sim.vels = self.sim.vels + 0.5 * (self.sim.accs_last + self.sim.accs) * self.timestep
self.sim.accs_last = self.sim.accs
def second_step_no_vel(self):
coords_next = 2 * self.sim.coords - self.sim.coords_last + self.sim.accs * self.timestep * self.timestep
self.sim.coords_last = self.sim.coords
self.sim.coords = coords_next
def second_step_langevin(self):
# From Gronbech-Jensen 2013; uses the friction/noise stashed in first_step_langevin
self.sim.vels = self.sim.vels + 0.5 * self.timestep * (self.sim.accs_last + self.sim.accs) - self._alpha * (self.sim.coords - self.sim.coords_last) / self.sim.masses.unsqueeze(
2) + self._beta / self.sim.masses.unsqueeze(2)
self.sim.accs_last = self.sim.accs
def second_step_langevin_simple(self):
gamma, twokbT = self.otherparams['thermostat_const'], self.otherparams['temperature']
self.sim.accs = self.sim.accs + (-gamma * self.sim.vels + np.sqrt(gamma * twokbT) * torch.randn(self.sim.vels.shape,
device=self.device)) / self.sim.masses.unsqueeze(2)
self.sim.vels = self.sim.vels + 0.5 * (self.sim.accs_last + self.sim.accs) * self.timestep
self.sim.accs_last = self.sim.accs
# example kinetic_energy 10
class Reporter(torch.nn.Module): # prints out observables etc.
def __init__(self, Simobj, reportdict):
super(Reporter, self).__init__()
self.sim = Simobj
self.keys = []
self.freq = []
supportedreports = ['kinetic_energy', 'step']
for key, item in reportdict.items():
if key in supportedreports:
self.keys.append(key)
self.freq.append(item)
self.functiondict = {'kinetic_energy': self.kinetic_energy, 'step': self.step}
def report(self):
# assumes the Simulator maintains a step counter (set in sim_step_vel below)
for key, freq in zip(self.keys, self.freq):
if self.sim.step % freq == 0:
self.functiondict[key]()
def step(self):
print(f"step {self.sim.step}")
def kinetic_energy(self):
# KE = 0.5 * sum(m * v^2); assumes the simulation tracks per-particle velocities
ke = 0.5 * torch.sum(self.sim.masses.unsqueeze(-1) * self.sim.vels ** 2)
print(f"step {self.sim.step}: kinetic energy {ke.item():.4f}")
# Differentiable molecular simulation of proteins with a coarse-grained potential
class Simulator(torch.nn.Module):
"""
Parameters is a Dictionary of Tensors that will be learned
ex. {bond_constants : torch.tensor}
Application is a Dictionary defining how the tensors will be applied to the simulation data
"""
def __init__(self, particledict, parameterdict, applicationdict,
forcefield_spec, thermostatdict, reportdict, box_size, device='cpu'):
super(Simulator, self).__init__()
# self.params = {}
# self.application = {}
# for key, item in parameterdict:
# self.params[key] = torch.nn.Parameter(item)
# for key, item in applicationdict:
# self.application[key] = item
self.masses = particledict.masses
self.coords = particledict.coords
# Intialize Tensors which are edited in Integrator and Thermostat Object
if thermostatdict['type'] != "no_vel":
self.vels = torch.randn(self.coords.shape, device=device) * thermostatdict['start_temperature']
self.accs_last = torch.zeros(self.coords.shape, device=device)
self.accs = torch.zeros(self.coords.shape, device=device)
else:
self.accs = torch.zeros(self.coords.shape, device=device)
self.coords_last = self.coords.clone() + torch.randn(self.coords.shape, device=device) * \
thermostatdict['start_temperature'] * thermostatdict['timestep']
self.Thermostat = Thermostat(self, thermostatdict['type'], thermostatdict['timestep'], thermostatdict['temperature'],
thermostatdict['thermostatparams'], device=device)
self.Integrator = Integrator(self, thermostatdict['type'], thermostatdict['time'], thermostatdict['timestep'],
thermostatdict['temperature'], otherparams=thermostatdict['thermostatparams'],
device=device)
self.Reporter = Reporter(self, reportdict)
self.System_Observables = System_Observables  # presumably defined elsewhere in the prototype
self.Force_Field = ForceField(forcefield_spec, particledict)
self.Box = Box(box_size, device=device)
# self.ff_distances = torch.nn.Parameter(ff_distances)
# self.ff_angles = torch.nn.Parameter(ff_angles)
# self.ff_dihedrals = torch.nn.Parameter(ff_dihedrals)
# def sim_step_novel(self, coords, masses,):
def center_system(self):
center = self.Box/2
current_com = torch.mean(self.coords * F.normalize(self.masses))
self.coords.add_(center - current_com)
# returns difference vectors in matrix form for all coordinates and enforces minimum image convention
# vector from p0 to p1 = min_image[0][1]
def min_image(self, coords):
box_size = self.Box[0] # Assumes Cubic Box at least for now
n_atoms = coords.shape[0]
tmp = coords.unsqueeze(1).expand(-1, n_atoms, -1)
diffs = tmp - tmp.transpose(0, 1)
min_image = diffs - torch.round(diffs / box_size) * box_size
return min_image
# Returns distances between all particles as a symmetric matrix
def distances(self, min_image):
return min_image.norm(dim=2)
# Returns Matrix of normalized vectors ex. vectors[0][1] returns the normalized vector from particle 0 pointing at particle 1
def vectors(self, min_image):
return F.normalize(min_image, dim=2)
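# Worked example of the minimum image convention (hypothetical numbers): with
# box_size = 10 and 1-D positions 0 and 9, the raw difference coords[0] - coords[1]
# is -9, and -9 - round(-9 / 10) * 10 = +1, so the nearest periodic image is used.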
def sim_step_vel(self, n_steps, integrator="vel", device="cpu", start_temperature=0.1, timestep=0.02,
verbosity = 0,
thermostat_const=0.0, # Set to 0.0 to run without a thermostat (NVE ensemble)
temperature=0.0, # The effective temperature of the thermostat
):
for i in range(n_steps):
self.Integrator.first_step()
min_image = self.min_image(self.coords)
distances = self.distances(min_image)
vectors = self.vectors(min_image)
min_image_mc = min_image[self.mc_mask]
distances_mc = self.distances(min_image_mc)
vectors_mc = self.vectors(min_image_mc)
#force_calculation, return the accs f/mass
# return energy here as well
F, U = self.Force_Field.compute_forces(distances, vectors, distances_mc, vectors_mc)
self.accs = F / self.masses
self.Integrator.second_step()
self.Thermostat.apply()
self.Reporter.report()
def forward(self,
coords,
orientations,
inters_flat,
inters_ang,
inters_dih,
masses,
seq,
native_coords,
n_steps,
integrator="vel", # vel/no_vel/min/langevin/langevin_simple
timestep=0.02,
start_temperature=0.1,
thermostat_const=0.0, # Set to 0.0 to run without a thermostat (NVE ensemble)
temperature=0.0, # The effective temperature of the thermostat
sim_filepath=None, # Output PDB file to write to or None to not write out
energy=False, # Return the energy at the end of the simulation
report_n=10_000, # Print and write PDB every report_n steps
verbosity=2, # 0 for epoch info, 1 for protein info, 2 for simulation step info
):
assert integrator in ("vel", "no_vel", "min", "langevin", "langevin_simple"), f"Invalid integrator {integrator}"
device = coords.device
batch_size, n_atoms = masses.size(0), masses.size(1)
n_res = n_atoms // len(atoms)
dist_bin_centres_tensor = torch.tensor(dist_bin_centres, device=device)
pair_centres_flat = dist_bin_centres_tensor.index_select(0, inters_flat[0]).unsqueeze(0).expand(batch_size, -1, -1)
pair_pots_flat = self.ff_distances.index_select(0, inters_flat[0]).unsqueeze(0).expand(batch_size, -1, -1)
angle_bin_centres_tensor = torch.tensor(angle_bin_centres, device=device)
angle_centres_flat = angle_bin_centres_tensor.unsqueeze(0).unsqueeze(0).expand(batch_size, n_res, -1)
angle_pots_flat = self.ff_angles.index_select(1, inters_ang[0]).unsqueeze(0).expand(batch_size, -1, -1, -1)
dih_bin_centres_tensor = torch.tensor(dih_bin_centres, device=device)
dih_centres_flat = dih_bin_centres_tensor.unsqueeze(0).unsqueeze(0).expand(batch_size, n_res - 1, -1)
dih_pots_flat = self.ff_dihedrals.index_select(1, inters_dih[0]).unsqueeze(0).expand(batch_size, -1, -1, -1)
native_coords_ca = native_coords.view(batch_size, n_res, 3 * len(atoms))[0, :, 3:6]
model_n = 0
# just preparing needed vectors
if integrator == "vel" or integrator == "langevin" or integrator == "langevin_simple":
vels = torch.randn(coords.shape, device=device) * start_temperature
accs_last = torch.zeros(coords.shape, device=device)
elif integrator == "no_vel":
coords_last = coords.clone() + torch.randn(coords.shape, device=device) * start_temperature * timestep
# The step the energy is returned on is not used for simulation, so we add an extra step
if energy:
n_steps += 1
for i in range(n_steps):
# MD Backend First step
if integrator == "vel":
coords = coords + vels * timestep + 0.5 * accs_last * timestep * timestep
elif integrator == "langevin":
# From Gronbech-Jensen 2013
alpha, twokbT = thermostat_const, temperature
beta = np.sqrt(twokbT * alpha * timestep) * torch.randn(vels.shape, device=device)
b = 1.0 / (1.0 + (alpha * timestep) / (2 * self.masses.unsqueeze(2)))
coords_last = coords
coords = coords + b * timestep * vels + 0.5 * b * (timestep ** 2) * accs_last + 0.5 * b * timestep * beta / masses.unsqueeze(2)
elif integrator == "langevin_simple":
coords = coords + vels * timestep + 0.5 * accs_last * timestep * timestep
# See https://arxiv.org/pdf/1401.1181.pdf for derivation of forces
printing = verbosity >= 2 and i % report_n == 0
returning_energy = energy and i == n_steps - 1
if printing or returning_energy:
dist_energy = torch.zeros(1, device=device)
angle_energy = torch.zeros(1, device=device)
dih_energy = torch.zeros(1, device=device)
# Add pairwise distance forces
crep = coords.unsqueeze(1).expand(-1, n_atoms, -1, -1) # makes list of coords like [[ [coord1] n times ], [coord2] n times], [coord3] n times]]
diffs = crep - crep.transpose(1, 2)
dists = diffs.norm(dim=3)
dists_flat = dists.view(batch_size, n_atoms * n_atoms)
dists_from_centres = pair_centres_flat - dists_flat.unsqueeze(2).expand(-1, -1, n_bins_force)
dist_bin_inds = dists_from_centres.abs().argmin(dim=2).unsqueeze(2)
# Force is gradient of potential
# So it is proportional to difference of previous and next value of potential
pair_forces_flat = 0.5 * (pair_pots_flat.gather(2, dist_bin_inds) - pair_pots_flat.gather(2, dist_bin_inds + 2))
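# (central difference: the potential value sits at bin i + 1, so the force is
# -(U[i+2] - U[i]) / 2, which is the 0.5 * (gather(i) - gather(i+2)) above)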
# Specify minimum to prevent division by zero errors
norm_diffs = diffs / dists.clamp(min=0.01).unsqueeze(3)
pair_accs = (pair_forces_flat.view(batch_size, n_atoms, n_atoms)).unsqueeze(3) * norm_diffs
accs = pair_accs.sum(dim=1) / masses.unsqueeze(2)
if printing or returning_energy:
dist_energy += 0.5 * pair_pots_flat.gather(2, dist_bin_inds + 1).sum()
atom_coords = coords.view(batch_size, n_res, 3 * len(atoms))
atom_accs = torch.zeros(batch_size, n_res, 3 * len(atoms), device=device)
# Angle forces
# across_res is the number of atoms in the next residue, starting from atom_3
for ai, (atom_1, atom_2, atom_3, across_res) in enumerate(angles):
ai_1, ai_2, ai_3 = atoms.index(atom_1), atoms.index(atom_2), atoms.index(atom_3)
if across_res == 0:
ba = atom_coords[:, : , (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, : , (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, : , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, : , (ai_2 * 3):(ai_2 * 3 + 3)]
# Use residue potential according to central atom
angle_pots_to_use = angle_pots_flat[:, ai, :]
elif across_res == 1:
ba = atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
angle_pots_to_use = angle_pots_flat[:, ai, :-1]
elif across_res == 2:
ba = atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
angle_pots_to_use = angle_pots_flat[:, ai, 1:]
ba_norms = ba.norm(dim=2)
bc_norms = bc.norm(dim=2)
angs = torch.acos((ba * bc).sum(dim=2) / (ba_norms * bc_norms))
n_angles = n_res if across_res == 0 else n_res - 1
angles_from_centres = angle_centres_flat[:, :n_angles] - angs.unsqueeze(2)
angle_bin_inds = angles_from_centres.abs().argmin(dim=2).unsqueeze(2)
angle_forces = 0.5 * (angle_pots_to_use.gather(2, angle_bin_inds) - angle_pots_to_use.gather(2, angle_bin_inds + 2))
cross_ba_bc = torch.cross(ba, bc, dim=2)
fa = angle_forces * normalize(torch.cross( ba, cross_ba_bc, dim=2), dim=2) / ba_norms.unsqueeze(2)
fc = angle_forces * normalize(torch.cross(-bc, cross_ba_bc, dim=2), dim=2) / bc_norms.unsqueeze(2)
fb = -fa -fc
if across_res == 0:
atom_accs[:, : , (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, : , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, : , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
elif across_res == 1:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
elif across_res == 2:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
if printing or returning_energy:
angle_energy += angle_pots_to_use.gather(2, angle_bin_inds + 1).sum()
# Dihedral forces
# across_res is the number of atoms in the next residue, starting from atom_4
for di, (atom_1, atom_2, atom_3, atom_4, across_res) in enumerate(dihedrals):
ai_1, ai_2, ai_3, ai_4 = atoms.index(atom_1), atoms.index(atom_2), atoms.index(atom_3), atoms.index(atom_4)
if across_res == 1:
ab = atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)]
# Use residue potential according to central atom
dih_pots_to_use = dih_pots_flat[:, di, :-1]
elif across_res == 2:
ab = atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)]
dih_pots_to_use = dih_pots_flat[:, di, 1:]
elif across_res == 3:
ab = atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] - atom_coords[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)]
bc = atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] - atom_coords[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)]
cd = atom_coords[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] - atom_coords[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)]
dih_pots_to_use = dih_pots_flat[:, di, 1:]
cross_ab_bc = torch.cross(ab, bc, dim=2)
cross_bc_cd = torch.cross(bc, cd, dim=2)
bc_norms = bc.norm(dim=2).unsqueeze(2)
dihs = torch.atan2(
torch.sum(torch.cross(cross_ab_bc, cross_bc_cd, dim=2) * bc / bc_norms, dim=2),
torch.sum(cross_ab_bc * cross_bc_cd, dim=2)
)
dihs_from_centres = dih_centres_flat - dihs.unsqueeze(2)
dih_bin_inds = dihs_from_centres.abs().argmin(dim=2).unsqueeze(2)
dih_forces = 0.5 * (dih_pots_to_use.gather(2, dih_bin_inds) - dih_pots_to_use.gather(2, dih_bin_inds + 2))
fa = dih_forces * normalize(-cross_ab_bc, dim=2) / ab.norm(dim=2).unsqueeze(2)
fd = dih_forces * normalize( cross_bc_cd, dim=2) / cd.norm(dim=2).unsqueeze(2)
# Forces on the middle atoms have to keep the sum of torques null
# Forces taken from http://www.softberry.com/freedownloadhelp/moldyn/description.html
fb = ((ab * -bc) / (bc_norms ** 2) - 1) * fa - ((cd * -bc) / (bc_norms ** 2)) * fd
fc = -fa - fb - fd
if across_res == 1:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, :-1, (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
elif across_res == 2:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, :-1, (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
elif across_res == 3:
atom_accs[:, :-1, (ai_1 * 3):(ai_1 * 3 + 3)] += fa
atom_accs[:, 1: , (ai_2 * 3):(ai_2 * 3 + 3)] += fb
atom_accs[:, 1: , (ai_3 * 3):(ai_3 * 3 + 3)] += fc
atom_accs[:, 1: , (ai_4 * 3):(ai_4 * 3 + 3)] += fd
if printing or returning_energy:
dih_energy += dih_pots_to_use.gather(2, dih_bin_inds + 1).sum()
accs += atom_accs.view(batch_size, n_atoms, 3) / masses.unsqueeze(2)
# Shortcut to return energy at a given step
if returning_energy:
return dist_energy + angle_energy + dih_energy
# Second step
if integrator == "vel":
vels = vels + 0.5 * (accs_last + accs) * timestep
accs_last = accs
elif integrator == "no_vel":
coords_next = 2 * coords - coords_last + accs * timestep * timestep
coords_last = coords
coords = coords_next
elif integrator == "langevin":
# From Gronbech-Jensen 2013
vels = vels + 0.5 * timestep * (accs_last + accs) - alpha * (coords - coords_last) / masses.unsqueeze(2) + beta / masses.unsqueeze(2)
accs_last = accs
elif integrator == "langevin_simple":
gamma, twokbT = thermostat_const, temperature
accs = accs + (-gamma * vels + np.sqrt(gamma * twokbT) * torch.randn(vels.shape, device=device)) / masses.unsqueeze(2)
vels = vels + 0.5 * (accs_last + accs) * timestep
accs_last = accs
elif integrator == "min":
coords = coords + accs * 0.1
# Apply thermostat
if integrator in ("vel", "no_vel") and thermostat_const > 0.0:
thermostat_prob = timestep / thermostat_const
for ai in range(n_atoms):
if random() < thermostat_prob:
if integrator == "vel":
# Actually this should be divided by the mass
new_vel = torch.randn(3, device=device) * temperature
vels[0, ai] = new_vel
elif integrator == "no_vel":
new_diff = torch.randn(3, device=device) * temperature * timestep
coords_last[0, ai] = coords[0, ai] - new_diff
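# (Andersen-style thermostat: with probability timestep / thermostat_const per
# step, an atom's velocity (or last position) is resampled from the thermal
# distribution)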
if printing:
total_energy = dist_energy + angle_energy + dih_energy
out_line = " Step {:8} / {} - acc {:6.3f} {}- energy {:6.2f} ( {:6.2f} {:6.2f} {:6.2f} ) - Cα RMSD {:6.2f}".format(
i + 1, n_steps, torch.mean(accs.norm(dim=2)).item(),
"- vel {:6.3f} ".format(torch.mean(vels.norm(dim=2)).item()) if integrator in ("vel", "langevin", "langevin_simple") else "",
total_energy.item(), dist_energy.item(), angle_energy.item(), dih_energy.item(),
rmsd(coords.view(batch_size, n_res, 3 * len(atoms))[0, :, 3:6], native_coords_ca)[0].item())
report(out_line, 2, verbosity)
if sim_filepath and i % report_n == 0:
model_n += 1
with open(sim_filepath, "a") as of:
of.write("MODEL {:>8}\n".format(model_n))
for ri, r in enumerate(seq):
for ai, atom in enumerate(atoms):
of.write("ATOM {:>4} {:<2} {:3} A{:>4} {:>8.3f}{:>8.3f}{:>8.3f} 1.00 0.00 {:>2} \n".format(
len(atoms) * ri + ai + 1, atom[:2].upper(),
one_to_three_aas[r], ri + 1,
coords[0, len(atoms) * ri + ai, 0].item(),
coords[0, len(atoms) * ri + ai, 1].item(),
coords[0, len(atoms) * ri + ai, 2].item(),
atom[0].upper()))
of.write("ENDMDL\n")
return coords
def training_step(model_filepath, atom_ff_definitions, device="cpu", verbosity=0):
max_n_steps = 2_000
learning_rate = 1e-4
n_accumulate = 100
parameters = atom_ff_definitions.parameters
applications = atom_ff_definitions.applications
simulator = Simulator(parameters, applications)
train_set = ProteinDataset(train_proteins, train_val_dir, device=device)
val_set = ProteinDataset(val_proteins, train_val_dir, device=device)
optimizer = torch.optim.Adam(simulator.parameters(), lr=learning_rate)
report("Starting training", 0, verbosity)
for ei in count(start=0, step=1):
# After 37 epochs reset the optimiser with a lower learning rate
if ei == 37:
optimizer = torch.optim.Adam(simulator.parameters(), lr=learning_rate / 2)
train_rmsds, val_rmsds = [], []
n_steps = min(250 * ((ei // 5) + 1), max_n_steps) # Scale up n_steps over epochs
train_inds = list(range(len(train_set)))
val_inds = list(range(len(val_set)))
shuffle(train_inds)
shuffle(val_inds)
simulator.train()
optimizer.zero_grad()
for i, ni in enumerate(train_inds):
# basically need to get observables from starting info
# then
native_coords, inters_flat, inters_ang, inters_dih, masses, seq = train_set[ni]
coords = simulator(native_coords.unsqueeze(0), inters_flat.unsqueeze(0),
inters_ang.unsqueeze(0), inters_dih.unsqueeze(0), masses.unsqueeze(0),
seq, native_coords.unsqueeze(0), n_steps, verbosity=verbosity)
loss, passed = rmsd(coords[0], native_coords)
train_rmsds.append(loss.item())
if passed:
loss_log = torch.log(1.0 + loss)
loss_log.backward()
report(" Training {:4} / {:4} - RMSD {:6.2f} over {:4} steps and {:3} residues".format(
i + 1, len(train_set), loss.item(), n_steps, len(seq)), 1, verbosity)
if (i + 1) % n_accumulate == 0:
optimizer.step()
optimizer.zero_grad()
simulator.eval()
with torch.no_grad():
for i, ni in enumerate(val_inds):
native_coords, inters_flat, inters_ang, inters_dih, masses, seq = val_set[ni]
coords = simulator(native_coords.unsqueeze(0), inters_flat.unsqueeze(0),
inters_ang.unsqueeze(0), inters_dih.unsqueeze(0), masses.unsqueeze(0),
seq, native_coords.unsqueeze(0), n_steps, verbosity=verbosity)
loss, passed = rmsd(coords[0], native_coords)
val_rmsds.append(loss.item())
report(" Validation {:4} / {:4} - RMSD {:6.2f} over {:4} steps and {:3} residues".format(
i + 1, len(val_set), loss.item(), n_steps, len(seq)), 1, verbosity)
torch.save({"distances": simulator.ff_distances.data,
"angles": simulator.ff_angles.data,
"dihedrals": simulator.ff_dihedrals.data,
"optimizer": optimizer.state_dict()},
model_filepath)
report("Epoch {:4} - med train/val RMSD {:6.3f} / {:6.3f} over {:4} steps".format(
ei + 1, np.median(train_rmsds), np.median(val_rmsds), n_steps), 0, verbosity)
# Read a dataset of input files
class ProteinDataset(Dataset):
def __init__(self, pdbids, coord_dir, device="cpu"):
self.pdbids = pdbids
self.coord_dir = coord_dir
self.set_size = len(pdbids)
self.device = device
def __len__(self):
return self.set_size
def __getitem__(self, index):
fp = os.path.join(self.coord_dir, self.pdbids[index] + ".txt")
return read_input_file(fp, device=self.device)
```
#### File: oxTorch_prototype/cgdms/observables.py
```python
import torch
# Methods for calculating properties of simulation system
class Observables():
def __init__(self, observable_dict):
self.observable_dict = observable_dict
self.supported_observables = ['kinetic_energy', 'angles']
def kinetic_energy(self, masses, vels):
# KE = sum over particles of 1/2 * m * v^2
return 0.5 * (masses.unsqueeze(-1) * vels ** 2).sum()
``` |
{
"source": "jonahpearl/face-rhythm",
"score": 2
} |
#### File: tests/integration_tests/test_session_architectures.py
```python
import cv2
import h5py
from matplotlib import pyplot as plt
from face_rhythm.util import helpers, set_roi, setup
from face_rhythm.optic_flow import optic_flow, clean_results, conv_dim_reduce
from face_rhythm.analysis import pca, spectral_analysis, tca
from face_rhythm.visualize import videos, plots
from pathlib import Path
import shutil
def run_basic(run_name):
project_path = Path('test_runs').resolve() / run_name
video_path = Path('test_data').resolve() / run_name / 'session1'
overwrite_config = True
remote = True
trials = False
multisession = False
config_filepath = setup.setup_project(project_path, video_path, run_name, overwrite_config, remote, trials,
multisession)
# VIDEO LOAD
config = helpers.load_config(config_filepath)
config['Video']['file_prefix'] = 'gmou06'
config['Video']['print_filenames'] = True
config['General']['overwrite_nwbs'] = True
helpers.save_config(config, config_filepath)
setup.prepare_videos(config_filepath)
run_downstream(config_filepath)
def run_multi(run_name):
project_path = Path('test_runs/' + run_name).resolve()
video_path = Path('test_data/' + run_name).resolve()
overwrite_config = True
remote = True
trials = False
multisession = True
config_filepath = setup.setup_project(project_path, video_path, run_name, overwrite_config, remote, trials,
multisession)
# VIDEO LOAD
config = helpers.load_config(config_filepath)
config['Video']['session_prefix'] = 'session'
config['Video']['print_filenames'] = True
config['General']['overwrite_nwbs'] = True
helpers.save_config(config, config_filepath)
setup.prepare_videos(config_filepath)
run_downstream(config_filepath)
def run_downstream(config_filepath):
# ROI Selection
config = helpers.load_config(config_filepath)
config['ROI']['session_to_set'] = 0 # 0 indexed. Chooses the session to use
config['ROI']['vid_to_set'] = 0 # 0 indexed. Sets the video to use to make an image
config['ROI']['frame_to_set'] = 1 # 0 indexed. Sets the frame number to use to make an image
config['ROI']['load_from_file'] = True # if you've already run this and want to use the existing ROI, set to True
helpers.save_config(config, config_filepath)
# special line to just grab the points
with h5py.File(Path('test_data/pts_all.h5'), 'r') as pt:
pts_all = helpers.h5_to_dict(pt)
for session in config['General']['sessions']:
helpers.save_pts(session['nwb'], pts_all)
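# (presumably bypasses interactive ROI/point selection so the test can run
# headless; pts_all.h5 supplies pre-computed points for every session)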
# Optic Flow
config = helpers.load_config(config_filepath)
config['Optic']['vidNums_toUse'] = [0]
config['Optic']['spacing'] = 16
config['Optic']['showVideo_pref'] = False
config['Video']['printFPS_pref'] = False
config['Video']['fps_counterPeriod'] = 10
config['Video']['dot_size'] = 1
config['Video']['save_demo'] = False
config['Video']['demo_len'] = 10
config['Optic']['lk'] = {}
config['Optic']['lk']['winSize'] = (15, 15)
config['Optic']['lk']['maxLevel'] = 2
config['Optic']['lk']['criteria'] = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 3, 0.001)
config['Optic']['recursive'] = False
config['Optic']['recursive_relaxation_factor'] = 0.005
config['Optic']['multithread'] = False
helpers.save_config(config, config_filepath)
optic_flow.optic_workflow(config_filepath)
# Clean Up
config = helpers.load_config(config_filepath)
config['Clean']['outlier_threshold_positions'] = 25
config['Clean']['outlier_threshold_displacements'] = 4
config['Clean']['framesHalted_beforeOutlier'] = 4
config['Clean']['framesHalted_afterOutlier'] = 2
config['Clean']['relaxation_factor'] = 0.005
helpers.save_config(config, config_filepath)
clean_results.clean_workflow(config_filepath)
# Visualize
config = helpers.load_config(config_filepath)
config['Video']['demo_len'] = 10
config['Video']['data_to_display'] = 'positions_cleanup_absolute'
config['Video']['save_demo'] = True
helpers.save_config(config, config_filepath)
videos.visualize_points(config_filepath)
# ConvDR
config = helpers.load_config(config_filepath)
config['CDR']['width_cosKernel'] = 48
config['CDR']['num_dots'] = config['Optic']['num_dots']
config['CDR']['spacing'] = 16
config['CDR']['display_points'] = False
config['CDR']['vidNum'] = 0
config['CDR']['frameNum'] = 1
config['CDR']['dot_size'] = 1
config['CDR']['kernel_alpha'] = 0.3
config['CDR']['kernel_pixel'] = 10
config['CDR']['num_components'] = 3
helpers.save_config(config, config_filepath)
conv_dim_reduce.conv_dim_reduce_workflow(config_filepath)
# Visualize
config = helpers.load_config(config_filepath)
config['Video']['demo_len'] = 10
config['Video']['data_to_display'] = 'positions_convDR_absolute'
config['Video']['save_demo'] = True
helpers.save_config(config, config_filepath)
videos.visualize_points(config_filepath)
pca.pca_workflow(config_filepath, 'positions_convDR_absolute')
config = helpers.load_config(config_filepath)
config['PCA']['n_factors_to_show'] = 3
helpers.save_config(config, config_filepath)
plots.plot_pca_diagnostics(config_filepath)
plt.close('all')
# Visualize PCs
config = helpers.load_config(config_filepath)
config['Video']['factor_category_to_display'] = 'PCA'
config['Video']['factor_to_display'] = 'factors_points'
config['Video']['points_to_display'] = 'positions_convDR_absolute'
config['Video']['demo_len'] = 10
config['Video']['dot_size'] = 2
config['Video']['save_demo'] = True
helpers.save_config(config, config_filepath)
videos.visualize_factor(config_filepath)
# Positional TCA
config = helpers.load_config(config_filepath)
config['TCA']['pref_useGPU'] = False
config['TCA']['rank'] = 4
config['TCA']['init'] = 'random'
config['TCA']['tolerance'] = 1e-06
config['TCA']['verbosity'] = 0
config['TCA']['n_iters'] = 100
helpers.save_config(config, config_filepath)
tca.positional_tca_workflow(config_filepath, 'positions_convDR_meanSub')
config = helpers.load_config(config_filepath)
config['TCA']['ftype'] = 'positional'
helpers.save_config(config, config_filepath)
plots.plot_tca_factors(config_filepath)
plt.close('all')
config = helpers.load_config(config_filepath)
config['Video']['factor_category_to_display'] = 'TCA'
config['Video']['factor_to_display'] = 'factors_positional_points'
config['Video']['points_to_display'] = 'positions_convDR_absolute'
config['Video']['demo_len'] = 10
config['Video']['dot_size'] = 2
config['Video']['save_demo'] = True
helpers.save_config(config, config_filepath)
videos.visualize_factor(config_filepath)
# CQT
config = helpers.load_config(config_filepath)
config['CQT']['hop_length'] = 16
config['CQT']['fmin_rough'] = 1.8
config['CQT']['sampling_rate'] = config['Video']['Fs']
config['CQT']['n_bins'] = 35
helpers.save_config(config, config_filepath)
spectral_analysis.prepare_freqs(config_filepath)
spectral_analysis.cqt_workflow(config_filepath, 'positions_convDR_meanSub')
config = helpers.load_config(config_filepath)
config['CQT']['pixelNum_toUse'] = 10
helpers.save_config(config, config_filepath)
plots.plot_cqt(config_filepath)
plt.close('all')
# Spectral TCA
config = helpers.load_config(config_filepath)
config['TCA']['pref_useGPU'] = False
config['TCA']['rank'] = 8
config['TCA']['init'] = 'random'
config['TCA']['tolerance'] = 1e-06
config['TCA']['verbosity'] = 0
config['TCA']['n_iters'] = 100
helpers.save_config(config, config_filepath)
tca.full_tca_workflow(config_filepath, 'positions_convDR_meanSub')
config = helpers.load_config(config_filepath)
config['TCA']['ftype'] = 'spectral'
helpers.save_config(config, config_filepath)
plots.plot_tca_factors(config_filepath)
plt.close('all')
config = helpers.load_config(config_filepath)
config['Video']['factor_category_to_display'] = 'TCA'
config['Video']['factor_to_display'] = 'factors_spectral_points'
config['Video']['points_to_display'] = 'positions_convDR_absolute'
config['Video']['demo_len'] = 10
config['Video']['dot_size'] = 2
config['Video']['save_demo'] = True
helpers.save_config(config, config_filepath)
videos.visualize_factor(config_filepath)
config = helpers.load_config(config_filepath)
config['Video']['factor_category_to_display'] = 'TCA'
config['Video']['factor_to_display'] = 'factors_spectral_points'
config['Video']['points_to_display'] = 'positions_convDR_absolute'
config['Video']['start_vid'] = 0
config['Video']['start_frame'] = 0
config['Video']['demo_len'] = 10
config['Video']['dot_size'] = 2
config['Video']['save_demo'] = True
config['Video']['factors_to_show'] = []
config['Video']['show_alpha'] = True
config['Video']['pulse_test_index'] = 0
helpers.save_config(config, config_filepath)
videos.face_with_trace(config_filepath)
plt.close('all')
# Cleanup
shutil.rmtree(config['Paths']['project'])
def test_single_session_single_video():
run_name = 'single_session_single_video'
run_multi(run_name)
def test_single_session_multi_video():
run_name = 'single_session_multi_video'
run_multi(run_name)
def test_multi_session_single_video():
run_name = 'multi_session_single_video'
run_multi(run_name)
def test_multi_session_multi_video():
run_name = 'multi_session_multi_video'
run_multi(run_name)
def test_basic_single_video():
run_name = 'single_session_single_video'
run_basic(run_name)
def test_basic_multi_video():
run_name = 'single_session_multi_video'
run_basic(run_name)
``` |
{
"source": "jonahrosenblum/pastepwn",
"score": 3
} |
#### File: pastepwn/actions/ircaction.py
```python
import socket
from pastepwn.util import TemplatingEngine
from .basicaction import BasicAction
class IrcAction(BasicAction):
"""Action to send an irc message to a certain channel"""
name = "IrcAction"
def __init__(
self,
server=None,
channel=None,
port=6667,
nick="pastepwn",
template=None
):
super().__init__()
self.ircsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server = server
self.channel = channel
self.port = port
self.nick = nick
self.template = template
def perform(self, paste, analyzer_name=None, matches=None):
"""Perform the action on the passed paste"""
text = TemplatingEngine.fill_template(paste, analyzer_name, template_string=self.template, matches=matches)
self.ircsock.connect((self.server, self.port))
self.ircsock.send(bytes("USER " + self.nick + " " + self.nick + " " + self.nick + "n", "UTF-8"))
self.ircsock.send(bytes("NICK " + self.nick + "n", "UTF-8"))
self.ircsock.send(bytes("JOIN " + self.channel + "n", "UTF-8"))
self.ircsock.send(bytes("PRIVMSG " + self.channel + " " + text + "n", "UTF-8"))
self.ircsock.send(bytes("QUIT n", "UTF-8"))
```
#### File: pastepwn/actions/telegramaction.py
```python
import logging
import re
from pastepwn.util import Request, TemplatingEngine
from .basicaction import BasicAction
class TelegramAction(BasicAction):
"""Action to send a Telegram message to a certain user or group"""
name = "TelegramAction"
def __init__(self, token, receiver, template=None):
super().__init__()
self.logger = logging.getLogger(__name__)
if token is None or not re.match(r"[0-9]+:[a-zA-Z0-9\-_]+", token):
raise ValueError("Bot token not correct or None!")
self.token = token
self.receiver = receiver
self.template = template
def perform(self, paste, analyzer_name=None, matches=None):
"""Send a message via a Telegram bot to a specified user, without checking for errors"""
r = Request()
text = TemplatingEngine.fill_template(paste, analyzer_name, template_string=self.template, matches=matches)
api_url = "https://api.telegram.org/bot{0}/sendMessage?chat_id={1}&text={2}".format(self.token, self.receiver, text)
r.get(api_url)
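# Usage sketch (hypothetical token/chat id):
#   action = TelegramAction(token="123456:ABC-DEF1234", receiver="987654321")
#   action.perform(paste)  # fills the template and fires the sendMessage request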
``` |
{
"source": "Jonahss/python-client",
"score": 2
} |
#### File: webdriver/extensions/location.py
```python
from selenium import webdriver
from ..mobilecommand import MobileCommand as Command
class Location(webdriver.Remote):
def toggle_location_services(self):
"""Toggle the location services on the device. Android only.
"""
self.execute(Command.TOGGLE_LOCATION_SERVICES, {})
return self
def set_location(self, latitude, longitude, altitude):
"""Set the location of the device
:Args:
- latitude - String or numeric value between -90.0 and 90.0
- longitude - String or numeric value between -180.0 and 180.0
- altitude - String or numeric value
"""
data = {
"location": {
"latitude": float(latitude),
"longitude": float(longitude),
"altitude": float(altitude)
}
}
self.execute(Command.SET_LOCATION, data)
return self
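# Usage sketch (assuming an active Appium session as `driver`):
#   driver.set_location(49.28, -123.12, 10)
#   driver.location  # -> {'latitude': ..., 'longitude': ..., 'altitude': ...}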
@property
def location(self):
"""Retrieves the current location
:return:
A dictionary whose keys are
- latitude
- longitude
- altitude
"""
return self.execute(Command.GET_LOCATION)['value']
# pylint: disable=protected-access
def _addCommands(self):
self.command_executor._commands[Command.TOGGLE_LOCATION_SERVICES] = \
('POST', '/session/$sessionId/appium/device/toggle_location_services')
self.command_executor._commands[Command.GET_LOCATION] = \
('GET', '/session/$sessionId/location')
self.command_executor._commands[Command.SET_LOCATION] = \
('POST', '/session/$sessionId/location')
```
#### File: webdriver/extensions/remote_fs.py
```python
import base64
from selenium import webdriver
from selenium.common.exceptions import InvalidArgumentException
from ..mobilecommand import MobileCommand as Command
class RemoteFS(webdriver.Remote):
def pull_file(self, path):
"""Retrieves the file at `path`. Returns the file's contents as base64.
:Args:
- path - the path to the file on the device
"""
data = {
'path': path,
}
return self.execute(Command.PULL_FILE, data)['value']
def pull_folder(self, path):
"""Retrieves a folder at `path`. Returns the folder's contents zipped
and encoded as Base64.
:Args:
- path - the path to the folder on the device
"""
data = {
'path': path,
}
return self.execute(Command.PULL_FOLDER, data)['value']
def push_file(self, destination_path, base64data=None, source_path=None):
"""Puts the data from the file at `source_path`, encoded as Base64, in the file specified as `path`.
Specify either `base64data` or `source_path`, if both specified default to `source_path`
:param destination_path: the location on the device/simulator where the local file contents should be saved
:param base64data: file contents, encoded as Base64, to be written to the file on the device/simulator
:param source_path: local file path for the file to be loaded on device
:return: WebDriver instance
"""
if source_path is None and base64data is None:
raise InvalidArgumentException('Must either pass base64 data or a local file path')
if source_path is not None:
try:
with open(source_path, 'rb') as f:
data = f.read()
except IOError:
message = 'source_path {} could not be found. Are you sure the file exists?'.format(source_path)
raise InvalidArgumentException(message)
base64data = base64.b64encode(data).decode('utf-8')
data = {
'path': destination_path,
'data': base64data,
}
self.execute(Command.PUSH_FILE, data)
return self
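# Usage sketch (hypothetical paths): pass base64 data directly, or let the
# method read and encode a local file:
#   driver.push_file('/data/local/tmp/hello.txt', source_path='hello.txt')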
# pylint: disable=protected-access
def _addCommands(self):
self.command_executor._commands[Command.PULL_FILE] = \
('POST', '/session/$sessionId/appium/device/pull_file')
self.command_executor._commands[Command.PULL_FOLDER] = \
('POST', '/session/$sessionId/appium/device/pull_folder')
self.command_executor._commands[Command.PUSH_FILE] = \
('POST', '/session/$sessionId/appium/device/push_file')
```
#### File: python-client/script/release.py
```python
import os
import sys
import io
VERSION_FILE_PATH = os.path.join(os.path.dirname(__file__), 'appium', 'version.py')
CHANGELOG_PATH = os.path.join(os.path.dirname(__file__), 'CHANGELOG.rst')
MESSAGE_RED = '\033[1;31m{}\033[0m'
MESSAGE_GREEN = '\033[1;32m{}\033[0m'
MESSAGE_YELLOW = '\033[1;33m{}\033[0m'
def get_current_version():
current = io.open(VERSION_FILE_PATH, encoding='utf-8').read().rstrip()
print('The current version is {}, type a new one'.format(MESSAGE_YELLOW.format(current)))
return current
def get_new_version():
print(MESSAGE_GREEN.format('new version:'))
for line in sys.stdin:
return line.rstrip()
VERSION_FORMAT = "version = '{}'\n"
def update_version_file(version):
new_version = VERSION_FORMAT.format(version)
with open(VERSION_FILE_PATH, 'w') as f:
f.write(new_version)
def call_bash_script(cmd):
if os.environ.get('DRY_RUN') is not None:
print('{} Calls: {}'.format(MESSAGE_RED.format('[DRY_RUN]'), cmd))
else:
os.system(cmd)
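# e.g. DRY_RUN=1 python script/release.py rehearses the release: every shell
# command is printed instead of executed.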
def commit_version_code(new_version_num):
call_bash_script('git commit {} -m "Bump {}"'.format(VERSION_FILE_PATH, new_version_num))
def tag_and_generate_changelog(new_version_num):
call_bash_script('git tag "v{}"'.format(new_version_num))
call_bash_script('gitchangelog > {}'.format(CHANGELOG_PATH))
call_bash_script('git commit {} -m "Update changelog for {}"'.format(CHANGELOG_PATH, new_version_num))
def upload_sdist(new_version_num):
call_bash_script('twine upload "dist/Appium-Python-Client-{}.tar.gz"'.format(new_version_num))
def push_changes_to_master(new_version_num):
call_bash_script('git push origin master')
call_bash_script('git push origin "v{}"'.format(new_version_num))
def ensure_publication(new_version_num):
if os.environ.get('DRY_RUN') is not None:
print('Run with {} mode.'.format(MESSAGE_RED.format('[DRY_RUN]')))
print('Are you sure to release as {}?[y/n]'.format(MESSAGE_YELLOW.format(new_version_num)))
for line in sys.stdin:
if line.rstrip().lower() == 'y':
return
exit('Canceled release process.')
def build_sdist():
call_bash_script('{} setup.py sdist'.format(sys.executable))
def validate_release_env():
if os.system('which twine') != 0:
exit("Please get twine via 'pip install twine'")
if os.system('which gitchangelog') != 0:
exit("Please get twine via 'pip install gitchangelog' or 'pip install git+git://github.com/vaab/gitchangelog.git' for Python 3.7")
def main():
validate_release_env()
get_current_version()
new_version = get_new_version()
update_version_file(new_version)
ensure_publication(new_version)
commit_version_code(new_version)
build_sdist()
tag_and_generate_changelog(new_version)
upload_sdist(new_version)
push_changes_to_master(new_version)
if __name__ == '__main__':
main()
``` |
{
"source": "JonahStrotmann/surfshark-linux-client",
"score": 2
} |
#### File: JonahStrotmann/surfshark-linux-client/log_window.py
```python
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk, Gdk
import time
class LogWindow(Gtk.Window):
def __init__(self, main):
Gtk.Window.__init__(self, title="SurfShark Client")
self.set_default_size(510, 720)
self.set_resizable(False)
self.move(400, 200)
self.set_icon_from_file(main.folder_path + "surfshark_linux_client.png")
self.log_container = Gtk.VBox()
self.add(self.log_container)
self.hbox = Gtk.HBox()
self.log_container.pack_start(self.hbox, True, True, 0)
self.vbox = Gtk.VBox()
self.hbox.pack_start(self.vbox, True, True, 25)
self.vbox.pack_start(Gtk.Label(), True, True, 0)
self.password_label = Gtk.Label()
self.password_label.set_markup("Password")
self.password_label.get_style_context().add_class('label')
self.vbox.pack_start(self.password_label, False, False, 0)
self.password = Gtk.Entry()
self.password.set_placeholder_text("Password")
self.password.connect("activate", main.log_action)
self.password.set_visibility(False)
self.vbox.pack_start(self.password, False, False, 0)
if not main.config['registered']:
self.vbox.pack_start(Gtk.Label(), False, False, 0)
self.confirm_password_label = Gtk.Label()
self.confirm_password_label.set_markup("Confirm Password")
self.confirm_password_label.get_style_context().add_class('label')
self.vbox.pack_start(self.confirm_password_label, False, False, 0)
self.confirm_password = Gtk.Entry()
self.confirm_password.set_placeholder_text("Confirm Password")
self.confirm_password.connect("activate", main.log_action)
self.confirm_password.set_visibility(False)
self.vbox.pack_start(self.confirm_password, False, False, 0)
button_container = Gtk.HBox()
button_container.pack_start(Gtk.Label(), True, True, 0)
log_button_text = "Log in" if main.config['registered'] else "Register"
self.log_button = Gtk.Button(label=log_button_text)
self.log_button.connect("clicked", main.log_action)
self.log_button.connect("enter-notify-event", self.hover)
self.log_button.connect("leave-notify-event", self.not_hover)
button_container.pack_start(self.log_button, False, False, 0)
button_container.pack_start(Gtk.Label(), True, True, 0)
self.vbox.pack_start(button_container, False, False, 30)
if not main.config['registered']:
or_label = Gtk.Label('OR')
or_label.get_style_context().add_class('or-label')
self.vbox.pack_start(or_label, False, False, 20)
self.log_without_pass_button = Gtk.Button(label="Don't use password")
self.log_without_pass_button.connect("clicked", main.log_action)
self.log_without_pass_button.connect("enter-notify-event", self.hover)
self.log_without_pass_button.connect("leave-notify-event", self.not_hover)
self.vbox.pack_start(self.log_without_pass_button, False, False, 30)
self.vbox.pack_start(Gtk.Label(), True, True, 0)
def hover(self, listbox_widget, crossing):
self.get_window().set_cursor(Gdk.Cursor(Gdk.CursorType.HAND2))
def not_hover(self, listbox_widget, crossing):
self.get_window().set_cursor(None)
def animate_loader(self):
while True:
if (not self.load): break
time.sleep(0.8)
curr_points = self.loading_label_points.get_text()
if(curr_points == "..."):
self.loading_label_points.set_text(" ")
else:
n = 0
new_points = "."
for c in curr_points:
if (c == "."):
n += 1
new_points += "."
for i in range(1, 3 - n):
new_points += " "
self.loading_label_points.set_text(new_points)
``` |
{
"source": "JonahSussman/closure-language",
"score": 3
} |
#### File: JonahSussman/closure-language/parser.py
```python
from expr import Expr
from stmt import Stmt
class Parser:
class ParserError(Exception):
pass
def __init__(self, tokens):
self.tokens = tokens
self.c_tok = 0
def match(self, *kinds):
if self.c_tok == len(self.tokens):
return False
for kind in kinds:
if kind == self.tokens[self.c_tok].kind:
return True
return False
def error(self, token):
print('Parser Error! Invalid token: %s' % (token))
raise Parser.ParserError
def declaration(self):
try:
if self.match('FUNCTION'):
self.c_tok += 1
return self.function('function')
elif self.match('LET'):
self.c_tok += 1
return self.var_declaration()
else:
return self.statement()
except Parser.ParserError:
print(self.tokens)
exit()
def statement(self):
if self.match('IF'):
self.c_tok += 1
return self.if_statement()
elif self.match('WHILE'):
self.c_tok += 1
return self.while_statement()
elif self.match('PRINT'):
self.c_tok += 1
return self.print_statement()
elif self.match('RETURN'):
self.c_tok += 1
return self.return_statement()
elif self.match('L_BRACE'):
self.c_tok += 1
return Stmt.Block(self.block())
return self.expression_statement()
def block(self):
statements = []
while not self.match('R_BRACE') and self.c_tok < len(self.tokens):
statements.append(self.declaration())
self.c_tok += 1
return statements
def function(self, like):
if not self.match('ID'):
raise Parser.ParserError
name = self.tokens[self.c_tok].value
self.c_tok += 1
if not self.match('L_PAREN'):
print('Expected \'(\' after function name')
raise Parser.ParserError
self.c_tok += 1
params = []
if not self.match('R_PAREN'):
while True:
if not self.match('ID'):
print('Expected identifier in parameters.')
raise Parser.ParserError
params.append(self.tokens[self.c_tok])
self.c_tok += 1
if not self.match('COMMA'):
break
self.c_tok += 1
if not self.match('R_PAREN'):
print('Expected \')\' after function params')
raise Parser.ParserError
self.c_tok += 1
if not self.match('L_BRACE'):
print('Expected \'{\' before body')
raise Parser.ParserError
self.c_tok += 1
body = self.block()
return Stmt.Fn(name, params, body)
def print_statement(self):
value = self.expression()
if not self.match('ENDLINE'):
raise Parser.ParserError
self.c_tok += 1
return Stmt.Print(value)
def return_statement(self):
value = None
if not self.match('ENDLINE'):
value = self.expression()
if not self.match('ENDLINE'):
print('\\n must follow return value')
raise Parser.ParserError
self.c_tok += 1
return Stmt.Return('return', value)
def if_statement(self):
if not self.match('L_PAREN'):
raise Parser.ParserError
self.c_tok += 1
expression = self.expression()
if not self.match('R_PAREN'):
raise Parser.ParserError
self.c_tok += 1
then_branch = self.statement()
else_branch = None
if self.match('ELSE'):
self.c_tok += 1
else_branch = self.statement()
return Stmt.If(expression, then_branch, else_branch)
def while_statement(self):
if not self.match('L_PAREN'):
raise Parser.ParserError
self.c_tok += 1
expression = self.expression()
if not self.match('R_PAREN'):
raise Parser.ParserError
self.c_tok += 1
body = self.statement()
return Stmt.While(expression, body)
def expression_statement(self):
value = self.expression()
if not self.match('ENDLINE'):
raise Parser.ParserError
self.c_tok += 1
return Stmt.Expression(value)
def var_declaration(self):
if not self.match('ID'):
raise Parser.ParserError
name = self.tokens[self.c_tok].value
self.c_tok += 1
initializer = None
if self.match('EQUAL'):
self.c_tok += 1
initializer = self.expression()
if not self.match('ENDLINE'):
raise Parser.ParserError
self.c_tok += 1
return Stmt.Let(name, initializer)
def expression(self):
return self.assignment()
def assignment(self):
expr = self.cast()
if self.match('EQUAL'):
self.c_tok += 1
value = self.assignment()
if isinstance(expr, Expr.Variable):
return Expr.Assign(expr.name, value)
else:
raise Parser.ParserError
return expr
def cast(self):
expr = self.equality()
if self.match('CAST'):
self.c_tok += 1
kind = self.cast()
return Expr.Cast(expr, kind)
return expr
def equality(self):
expr = self.comparison()
while self.match('BANG_EQUAL', 'EQUAL_EQUAL', 'AND', 'OR'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.comparison()
expr = Expr.Listed(operator, [expr, right])
return expr
def comparison(self):
expr = self.addition()
while self.match('LESS', 'GREATER', 'LESS_EQUAL', 'GREATER_EQUAL'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.addition()
expr = Expr.Listed(operator, [expr, right])
return expr
def addition(self):
expr = self.multiplication()
while self.match('PLUS', 'MINUS'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.multiplication()
expr = Expr.Listed(operator, [expr, right])
return expr
def multiplication(self):
expr = self.exponentiation()
while self.match('STAR', 'SLASH', 'MOD'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.exponentiation()
expr = Expr.Listed(operator, [expr, right])
return expr
def exponentiation(self):
stack = [self.negation()]
while self.match('CARET'):
self.c_tok += 1
stack.append(self.negation())
while len(stack) > 1:
right = stack.pop()
left = stack.pop()
stack.append(Expr.Listed('^', [left, right]))
return stack[0]
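# e.g. 2 ^ 3 ^ 2 reduces from the right: the stack [2, 3, 2] first folds to
# (3 ^ 2), then to 2 ^ (3 ^ 2), giving right-associative exponentiation.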
def negation(self):
if self.match('MINUS', 'NOT', 'LN', 'LOG_10', 'SQRT', 'INPUT'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.negation()
return Expr.Listed(operator, [right])
else:
return self.custom_root()
def custom_root(self):
expr = self.logbase()
while self.match('ROOT'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
right = self.logbase()
expr = Expr.Listed(operator, [expr, right])
return expr
def logbase(self):
if self.match('LOG'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
base = self.logbase()
argument = self.logbase()
return Expr.Listed(operator, [base, argument])
else:
return self.factorial()
def factorial(self):
expr = self.call()
while self.match('BANG'):
operator = self.tokens[self.c_tok].value
self.c_tok += 1
expr = Expr.Listed(operator, [expr])
return expr
def call(self):
expr = self.primary()
while True:
if self.match('L_PAREN'):
self.c_tok += 1
expr = self.finish_call(expr)
else:
break
return expr
def finish_call(self, callee):
arguments = []
if not self.match('R_PAREN'):
while True:
arguments.append(self.expression())
if not self.match('COMMA'):
break
self.c_tok += 1
if not self.match('R_PAREN'):
print('No \')\' after arguments!')
raise Parser.ParserError
paren = self.tokens[self.c_tok]
self.c_tok += 1
return Expr.Call(callee, paren, arguments)
def primary(self):
expr = None
token_value = self.tokens[self.c_tok].value
if self.match('ENDLINE'):
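# step back one so the trailing self.c_tok += 1 leaves the ENDLINE unconsumed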
self.c_tok -= 1
expr = Expr.Literal(None)
elif self.match('TRUE'): expr = Expr.Literal(True)
elif self.match('FALSE'): expr = Expr.Literal(False)
elif self.match('NIL'): expr = Expr.Literal(None)
elif self.match('STRING'): expr = Expr.Literal(token_value[1:len(token_value)-1])
elif self.match('NUM'): expr = Expr.Literal(float(token_value))
elif self.match('KIND'): expr = Expr.Literal(token_value)
elif self.match('ID'): expr = Expr.Variable(token_value)
elif self.match('L_PAREN'):
self.c_tok += 1
expr = Expr.Grouping(self.expression())
if not self.match('R_PAREN'):
raise Parser.ParserError
if not expr:
raise Parser.ParserError
self.c_tok += 1
return expr
def parse(self):
statements = []
while self.c_tok < len(self.tokens):
statements.append(self.declaration())
return statements
``` |
{
"source": "jonahthelion/tensorboardY",
"score": 2
} |
#### File: tensorboardY/tensorboardY/tools.py
```python
import base64
import sys
from PIL import Image as PILImage
from matplotlib.backends.backend_agg import FigureCanvasAgg
if sys.version_info[0] >= 3:
from io import BytesIO
else:
import cStringIO
def check_type(val, type_names, islist=False):
if not islist:
vals = [val]
else:
vals = val
if not isinstance(type_names, list):
type_names = [type_names]
for val in vals:
match = []
for type_name in type_names:
match.append(isinstance(val, type_name))
assert(any(match)), \
"{} is type {} but expected {}"\
.format(val, val.__class__.__name__,
' or '.join(map(str, type_names)))
if islist:
return vals
def pil_to_b64(img):
if sys.version_info[0] >= 3:
buffer = BytesIO()
else:
buffer = cStringIO.StringIO()
img.save(buffer, 'PNG')
return base64.b64encode(buffer.getvalue())
def b64_to_pil(b64):
img = base64.b64decode(b64)
if sys.version_info[0] >= 3:
return PILImage.open(BytesIO(img)).convert('RGB')
else:
return PILImage.open(cStringIO.StringIO(img)).convert('RGB')
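# Round-trip sketch: b64_to_pil(pil_to_b64(img)) should reproduce img as an RGB
# PIL image; the widget upload/preview path in widgets.py relies on exactly this.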
def mpl_to_pil(fig):
canvas = FigureCanvasAgg(fig)
canvas.draw()
pil_image = PILImage.frombytes('RGB', canvas.get_width_height(),
canvas.tostring_rgb()).convert('RGB')
return pil_image
```
#### File: tensorboardY/tensorboardY/widgets.py
```python
from PIL import Image as PILImage
from .tools import check_type, pil_to_b64, b64_to_pil
class Widget(object):
r"""
Base class for function input forms.
Args:
var (str): The function variable name this widget represents
name (str): The title of this widget
camera (bool): Allow the user to take pictures
image_upload (bool): Allow user to upload images
image_list (list[str]): List of file paths to images
image_names (list[str]): List of names the client will see
text_input (bool): Allow the user to input text
text_list (list[str]): List of example strings
text_names (list[str]): The names the client will see
option_list (list[str]): List of options. "Options" differ from "texts"
in that they won't be previewed to the client
option_names (list[str]): List of names the client will see
boolean (bool): Allow the user to input a boolean
slider (tuple): Tuple (min, max, increment) so for instance (0,1,.1)
would allow the user to choose 0, 0.1, 0.2, ..., 1.0.
slider_default (float): The initial position of the slider
"""
def __init__(self, var, name="Widget",
camera=False,
image_upload=False,
image_list=[], image_names=None,
text_input=False,
text_list=[], text_names=None,
option_list=[], option_names=None,
boolean=False,
slider=None, slider_default=None):
check_type(var, str)
self.var = var
self.name = name
check_type(camera, bool)
self.camera = camera
check_type(image_upload, bool)
self.image_upload = image_upload
self.image_list = [ex for ex in check_type(image_list,
str, islist=True)]
if image_names is None:
image_names = ["Image {}".format(i)
for i in range(len(self.image_list))]
self.image_names = [name for name in image_names]
assert(len(self.image_list) == len(self.image_names)),\
"{} != {}".format(len(self.image_list),
len(self.image_names))
check_type(text_input, bool)
self.text_input = text_input
self.text_list = [ex for ex in text_list]
if text_names is None:
len_limit = 35
text_names = [ex for ex in self.text_list]
for i, ex in enumerate(text_names):
if len(ex) > len_limit:
text_names[i] = "{}...".format(ex[:(len_limit - 3)])
self.text_names = [name for name in text_names]
assert(len(self.text_list) == len(self.text_names)),\
"{} != {}".format(len(self.text_list),
len(self.text_names))
self.option_list = [ex for ex in option_list]
if option_names is None:
option_names = ["Option {}".format(i)
for i in range(len(self.option_list))]
self.option_names = [name for name in option_names]
assert(len(self.option_list) == len(self.option_names)),\
"{} != {}".format(len(self.option_list),
len(self.option_names))
check_type(boolean, bool)
self.boolean = boolean
if slider is not None:
assert(len(slider) == 3), "slider {} not length 3"\
.format(len(slider))
self.slider = slider
self.slider_default = slider_default
def get_data(self, gui, opt_id):
if gui == 'upload_img':
assert(0 <= opt_id < len(self.image_list)),\
"opt_id {} not in [0,{})".format(opt_id, len(self.image_list))
img = PILImage.open(self.image_list[opt_id]).convert('RGB')
b64 = pil_to_b64(img)
return b64
if gui == 'upload_txt':
assert(0 <= opt_id < len(self.text_list)),\
"opt_id {} not in [0,{})".format(opt_id, len(self.text_list))
return self.text_list[opt_id]
def decode(self, arg):
if arg['kind'] == 'ignore':
return arg['data']
if arg['kind'] == 'opt_id':
return self.option_list[arg['data']]
if arg['kind'] == 'img':
return b64_to_pil(arg['data'])
if arg['kind'] == 'bool':
if arg['data'] == 'True':
return True
return False
assert(False), 'arg kind {} not understood'.format(arg['kind'])
class Image(Widget):
r"""
A template to build a `ty.Widget` for arguments that you know should
always be images.
Args:
var (str): The name of the argument that this widget represents
name (str): The title the user sees for this widget
camera (bool): Allow the user to take pictures
image_upload (bool): Allow user to upload images
exs (list[str]): List of file paths to images
ex_names (list[str]): The names for the images that the user sees
"""
def __init__(self, var, name="Image",
camera=True,
image_upload=True,
exs=[], ex_names=None, **kwargs):
super(Image, self).__init__(var=var, name=name,
camera=camera, image_upload=image_upload,
image_list=exs, image_names=ex_names,
**kwargs)
class Text(Widget):
r"""
A template to build arguments that you know should always be text.
Args:
var (str): The function variable name this widget represents
name (str): The title the user sees for this widget
text_input (bool): Allow the user to input text
exs (list[str]): List of example strings
ex_names (list[str]): The names for the texts that the user sees
"""
def __init__(self, var, name="Text",
text_input=True,
exs=[], ex_names=None, **kwargs):
super(Text, self).__init__(var=var, name=name,
text_input=text_input,
text_list=exs, text_names=ex_names,
**kwargs)
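# Usage sketch (hypothetical example paths/strings):
#   img_w = Image('photo', exs=['examples/cat.png'], ex_names=['Cat'])
#   txt_w = Text('caption', exs=['a cat sitting on a mat'])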
``` |
{
"source": "jonahweissman/cs3240-textbook-swap",
"score": 2
} |
#### File: cs3240-textbook-swap/marketplace/email.py
```python
from django.shortcuts import redirect
from django.core import mail
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from django.urls import reverse
from django.conf import settings
from django.views import generic
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Q
from base64 import b64decode
import re
from . import models, forms
def send_message(author, receiver, conversation, text, in_response_to=None):
message = models.Message.objects.create(
author=author,
conversation=conversation,
text=text,
in_response_to=in_response_to)
notify_about_new_message(author.user,
receiver.user,
conversation.item,
text,
message.id)
@login_required
def send_intro_message(request):
author = request.user.profile
item = models.Item.objects.get(pk=request.POST['item'])
to = item.item_seller_name
text = request.POST['message']
conversation, _ = models.Conversation.objects.get_or_create(
item=item,
buyer=author,
)
send_message(author, to, conversation, text)
return redirect(reverse('marketplace:message_list',
args=[item.pk]))
def notify_about_new_message(sender, receiver, item, message, uuid):
name = f"{sender.first_name} {sender.last_name}"
subject = f"New message about {item.item_name} from {name}"
html_message = render_to_string('marketplace/notification_email.html',
{
'message': message,
'item': item,
'name': name,
})
message = mail.EmailMultiAlternatives(
subject=subject,
body=strip_tags(html_message),
to=[receiver.email],
reply_to=[f'{settings.CLOUDMAILIN_ID}+{<EMAIL>'],
)
message.attach_alternative(html_message, "text/html")
message.send()
@csrf_exempt
def receive_message(request):
if is_unauthorized(request):
return HttpResponse(status=403)
f = forms.ReceiveMessageForm(rename_fields(request.POST))
if not f.is_valid():
return HttpResponse(status=400, reason="Failed to validate form")
message = f.save()
receiver = other_participant(message.conversation, message.author)
notify_about_new_message(sender=message.author.user,
receiver=receiver.user,
item=message.conversation.item,
message=message.text,
uuid=message.id)
return HttpResponse(status=200)
def other_participant(conversation, person_a):
if conversation.buyer == person_a:
return conversation.item.item_seller_name
else:
return conversation.buyer
def rename_fields(post):
return {
'in_response_to': post['headers[To]'],
'author': post['headers[From]'],
'text': post['reply_plain'] or post['plain'],
}
def is_unauthorized(request):
if not 'authorization' in request.headers:
return True
authorization_re = re.compile(r'Basic (.+)')
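# Expects a header like "Authorization: Basic dXNlcjpwYXNz", whose payload
# base64-decodes to "user:pass" and must equal CLOUDMAILIN_CREDENTIALS.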
authorization = authorization_re.search(request.headers['authorization'])
if not authorization:
return True
authorization = b64decode(authorization.group(1)).decode('ascii')
return (authorization is None or
authorization != settings.CLOUDMAILIN_CREDENTIALS)
class ConversationView(LoginRequiredMixin, generic.ListView):
model=models.Conversation
def get_queryset(self):
return self.model.objects.filter(
# only return conversation that are
# 1) about this item, and
# 2) involving this user as either buyer or seller
Q(item__pk=self.kwargs['pk'])
& (Q(buyer=self.request.user.profile)
| Q(item__item_seller_name=self.request.user.profile))
)
def get_context_data(self):
context = super().get_context_data()
conversation_list = []
for conversation_obj in context['object_list']:
conversation = {}
to = other_participant(conversation_obj, self.request.user.profile)
conversation['to'] = to
conversation['form'] = forms.SendMessageForm(
initial={
'to': to,
'item': conversation_obj.item,
'conversation': conversation_obj,
'in_response_to': conversation_obj.message_set.order_by('date').last()
}
)
conversation['conversation'] = conversation_obj
conversation_list.append(conversation)
context['conversation_list'] = conversation_list
context['item'] = models.Item.objects.get(pk=self.kwargs['pk'])
return context
def post(self, request, pk):
form = forms.SendMessageForm(request.POST)
if form.is_valid():
send_message(author=request.user.profile,
receiver=form.cleaned_data['to'],
conversation=form.cleaned_data['conversation'],
in_response_to=form.cleaned_data['in_response_to'],
text=form.cleaned_data['text'])
return self.get(request)
```
#### File: cs3240-textbook-swap/marketplace/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from phone_field import PhoneField
from django_auto_one_to_one import AutoOneToOneModel
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.urls import reverse
from django.core.validators import MinValueValidator
import uuid
import os
# Create your models here.
class Profile(AutoOneToOneModel(User)):
user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)
imagefile = models.ImageField(upload_to='images/', null=True, blank=True)
phonenumber = PhoneField(null=True, blank=True)
major = models.CharField(max_length=50, null=True, blank=True)
year = models.IntegerField(null=True, blank=True, validators=[MinValueValidator(0)])
def __str__(self):
return f'{self.user.first_name} {self.user.last_name}'
@receiver(post_save, sender=User)
def ensure_profile_exists(sender, **kwargs):
if kwargs.get('created', False):
Profile.objects.get_or_create(user=kwargs.get('instance'))
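# With the receiver above in place, creating a User transparently creates a
# matching Profile (illustrative):
#   user = User.objects.create_user('alice')
#   user.profile  # exists without an explicit Profile.objects.create(...)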
# @receiver(models.signals.pre_save, sender=Profile)
# def auto_delete_file_on_change(sender, instance, **kwargs):
# if not instance.pk:
# return False
# try:
# old_file = sender.objects.get(pk=instance.pk).imagefile
# except sender.DoesNotExist:
# return False
# new_file = instance.imagefile
# if not old_file == new_file:
# if not old_file.name == '' and os.path.isfile(old_file.path):
# os.remove(old_file.path)
class Item(models.Model):
item_condition_choices = (("Like New", "Like New"),("Good", "Good"), ("Fair", "Fair"), ("Poor", "Poor"))
    #Any changes to name, author, or description max_length will need to be reflected in views.py's book API string value truncation
item_name = models.CharField(max_length=300, null=True)
item_isbn = models.CharField(max_length=100, null=True)
item_edition = models.IntegerField(null=True, default = 1)
item_author = models.CharField(max_length=100, null=True)
item_course = models.CharField(max_length=100, null=True)
item_price = models.IntegerField(null=True)
item_image = models.ImageField(upload_to='images/', null=True)
item_condition = models.CharField(max_length=20, choices=item_condition_choices, default= "Like New")
item_posted_date = models.DateField(null=True)
item_seller_name = models.ForeignKey(Profile, on_delete=models.CASCADE, null=True)
item_description = models.TextField(max_length= 2000, null=True)
item_status_choices = (("Available", "Available"),("Sold", "Sold"), ("Hidden", "Hidden"))
item_status = models.CharField(max_length=20, choices=item_status_choices, default= "Available")
def __str__(self):
return self.item_name
def get_absolute_url(self):
return reverse('marketplace:myListings')
class Conversation(models.Model):
buyer = models.ForeignKey(Profile, on_delete=models.CASCADE)
item = models.ForeignKey(Item, on_delete=models.CASCADE)
def __str__(self):
return f'{self.buyer} interested in {self.item}'
class Message(models.Model):
author = models.ForeignKey(Profile, on_delete=models.CASCADE)
date = models.DateTimeField(auto_now_add=True)
text = models.TextField()
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
in_response_to = models.ForeignKey('self', on_delete=models.CASCADE, null=True)
conversation = models.ForeignKey(Conversation, on_delete=models.CASCADE)
def __str__(self):
return self.text
``` |
{
"source": "jonahweissman/toltec",
"score": 2
} |
#### File: scripts/toltec/repo.py
```python
from collections import namedtuple
from datetime import datetime
import gzip
import itertools
import logging
import os
from typing import Dict, Iterable, List, Optional
import requests
from .graphlib import TopologicalSorter
from .recipe import Package, Recipe
from .util import file_sha256, HTTP_DATE_FORMAT
from .version import DependencyKind
from . import templating
logger = logging.getLogger(__name__)
GroupedPackages = Dict[Recipe, List[Package]]
FetchedMissing = namedtuple("FetchedMissing", ["fetched", "missing"])
class Repo:
"""Repository of Toltec packages."""
def __init__(self, recipe_dir: str, repo_dir: str) -> None:
"""
Initialize the package repository.
:param recipe_dir: directory where recipe definitions are stored
:param repo_dir: directory where built packages are stored
"""
self.recipe_dir = recipe_dir
self.repo_dir = repo_dir
self.recipes = {}
for name in os.listdir(self.recipe_dir):
if name[0] != ".":
self.recipes[name] = Recipe.from_file(
os.path.join(self.recipe_dir, name)
)
def fetch_packages(self, remote: Optional[str]) -> FetchedMissing:
"""
Fetch locally missing packages from a remote server and report which
packages are missing from the remote and need to be built locally.
If `remote` is None, no packages are fetched from the network and all
the packages that are not in the local repo will be considered missing.
:param remote: remote server from which to check for existing packages
:returns: tuple containing fetched and missing packages grouped by
their parent recipe
"""
logger.info("Scanning for missing packages")
fetched: GroupedPackages = {}
missing: GroupedPackages = {}
for recipe in self.recipes.values():
fetched[recipe] = []
missing[recipe] = []
for package in recipe.packages.values():
filename = package.filename()
local_path = os.path.join(self.repo_dir, filename)
if os.path.isfile(local_path):
continue
if remote is not None:
remote_path = os.path.join(remote, filename)
req = requests.get(remote_path)
if req.status_code == 200:
with open(local_path, "wb") as local:
for chunk in req.iter_content(chunk_size=1024):
local.write(chunk)
last_modified = int(
datetime.strptime(
req.headers["Last-Modified"],
HTTP_DATE_FORMAT,
).timestamp()
)
os.utime(local_path, (last_modified, last_modified))
fetched[recipe].append(package)
continue
logger.info(
"Package %s (%s) is missing", package.pkgid(), recipe.name
)
missing[recipe].append(package)
return FetchedMissing(fetched=fetched, missing=missing)
@staticmethod
def order_dependencies(recipes: List[Recipe]) -> Iterable[Recipe]:
"""
Order a list of recipes so that all recipes that a recipe needs
come before that recipe in the list.
:param recipes: list of recipes to order
:returns: ordered list of recipes
:raises graphlib.CycleError: if a circular dependency exists
"""
# See <https://github.com/PyCQA/pylint/issues/2822>
toposort: TopologicalSorter[ # pylint:disable=unsubscriptable-object
Recipe
] = TopologicalSorter()
parent_recipes = {}
for recipe in recipes:
for package in recipe.packages.values():
parent_recipes[package.name] = recipe
for recipe in recipes:
deps = []
for dep in recipe.makedepends:
if (
dep.kind == DependencyKind.Host
and dep.package in parent_recipes
):
deps.append(parent_recipes[dep.package])
toposort.add(recipe, *deps)
return toposort.static_order()
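    # Illustrative driver (paths and remote URL are hypothetical):
    #
    #   repo = Repo("package", "build/repo")
    #   fetched, missing = repo.fetch_packages("https://example.org/toltec")
    #   to_build = [recipe for recipe, pkgs in missing.items() if pkgs]
    #   for recipe in Repo.order_dependencies(to_build):
    #       ...  # build each recipe, then publish:
    #   repo.make_index()
    #   repo.make_listing()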
def make_index(self) -> None:
"""Generate index files for all the packages in the repo."""
logger.info("Generating package index")
index_path = os.path.join(self.repo_dir, "Packages")
index_gzip_path = os.path.join(self.repo_dir, "Packages.gz")
with open(index_path, "w") as index_file:
with gzip.open(index_gzip_path, "wt") as index_gzip_file:
for recipe in self.recipes.values():
for package in recipe.packages.values():
filename = package.filename()
local_path = os.path.join(self.repo_dir, filename)
if not os.path.isfile(local_path):
continue
control = package.control_fields()
control += f"""Filename: {filename}
SHA256sum: {file_sha256(local_path)}
Size: {os.path.getsize(local_path)}
"""
index_file.write(control)
index_gzip_file.write(control)
def make_listing(self) -> None:
"""Generate the static web listing for packages in the repo."""
logger.info("Generating web listing")
by_section = lambda package: package.section
packages = [
package
for recipe in self.recipes.values()
for package in recipe.packages.values()
]
sections = dict(
(section, list(group))
for section, group in itertools.groupby(
sorted(packages, key=by_section), key=by_section
)
)
listing_path = os.path.join(self.repo_dir, "index.html")
template = templating.env.get_template("listing.html")
with open(listing_path, "w") as listing_file:
listing_file.write(template.render(sections=sections))
``` |
{
"source": "jonahwu/cner",
"score": 3
} |
#### File: gentraindata/lib/cimatch.py
```python
from fuzzywuzzy import fuzz
from lib import common
import re
import time
from parse import *
def matchFuzz(k, t):
    score = fuzz.partial_ratio(k, t)  # fuzzy partial-ratio match
#print(k, t, score)
if score==100:
return True
else:
return False
def matchPy(k, t):
#re.match(t, k)
return t in k
def getDocTime(k):
docTime=0
try:
ddd = search('{:d}/{:d}/{:d}{:s}{:d}:{:d}', k)
#print(ddd[0])
docTime=ddd[0]
#print('---- we go doc time----',docTime)
except:
docTime=0
#print(' can not get Doc time')
return docTime
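# Quick sketch of the expected behaviour (input strings are made up; in the
# parse library, a {:s} field matches whitespace):
#   getDocTime('判決日期 2020/05/27 10:43') -> 2020
#   getDocTime('no date here') -> 0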
def checkListExisted(data, mlist):
#a_string = "A string is more than its parts!"
#matches = ["more", "string"]
if all(x in data for x in mlist):
return True
else:
return False
def getBirthDirect(k, target):
kk = common.textToSentence(k, fine=True)
birthdata=''
for kkk in kk:
#if ('生' and target) in kkk:
checkList=['生', target]
if checkListExisted(kkk, checkList):
try:
ddd = re.search(r'(\d{4}年)',kkk)
#print('----------ddd:',ddd.group(), kkk)
birthdata = ddd.group()
birthdata = birthdata.replace('年','')
#print('get directly',birthdata)
return birthdata
except:
birthdata=''
return None
def getAgeDirect(k, target):
kk = common.textToSentence(k, fine=True)
age=''
for kkk in kk:
if target in kkk:
try:
s = re.search(r'(\d+歲)',kkk)
age=s.group()
return age
except:
age=''
def searchAgeFromDoc(k, target):
#print('into search birthday')
age=''
docTime=None
birthday=None
#s = re.search('[0-9]+歲', '100歲')
docTime = getDocTime(k)
#try:
#s = re.search('[0-9]+歲', k)
birthday = getBirthDirect(k, target)
#print('--- show birthday----',birthday)
if not birthday:
#print('--- find age ------')
age = getAgeDirect(k, target)
#s = re.search(r'(\d+歲)',k)
#age=s.group()
if age:
rage = age.replace('歲','')
#rage=age
if docTime and rage:
#print(docTime, rage,'birthday:', int(docTime)-int(rage))
#print(docTime, rage, int(docTime)-int(rage))
#print(docTime, rage)
birthday=str(int(docTime)-int(rage))
#except:
# rage=age
# print('----- can not get birthday might be some error------')
return birthday
def XsearchAgeFromDoc(k, target):
age=''
docTime=None
birthday=None
#s = re.search('[0-9]+歲', '100歲')
docTime = getDocTime(k)
try:
#s = re.search('[0-9]+歲', k)
        birthday = getBirthDirect(k, target)
if not birthday:
s = re.search(r'(\d+歲)',k)
age=s.group()
if age:
rage = age.replace('歲','')
#rage=age
if docTime and rage:
#print(docTime, rage, int(docTime)-int(rage))
birthday=str(int(docTime)-int(rage))
except:
rage=age
return birthday
def matchTarget(docs, target, cdict):
#cPRatio = fuzz.partial_ratio("治略位於台北市中正區忠孝東路二段27號3樓","忠孝東路") #抽取匹配
mtargets=common.readFileAsListWFirst(cdict)
#print(mtargets)
mdocs={}
#print(cdict)
for k in docs:
common.textToSentence(docs[k])
mres=[]
#print(k)
# match target
for t in mtargets:
t=t.strip()
#cPRatio = fuzz.partial_ratio("治略位於台北市中正區忠孝東路二段27號3樓","忠孝東路") #抽取匹配
#mflag = matchPy(docs[k], t)
mflag = matchFuzz(docs[k], t)
if mflag:
mres.append(t)
# match age need to reasoning to get the result but now it's a simple test
birthday = searchAgeFromDoc(docs[k], target)
if birthday:
mres.append(birthday)
#print('age', age)
mdocs[k]=common.dedupeList(mres)
#print(mdocs)
return mdocs
def storeMatch(mdocs):
common.storeFilebyDict(mdocs)
```
#### File: gentraindata/lib/common.py
```python
import os
import re
import random
import string
def textToSentence(text, fine=True):
#sentences = re.split(r"[.!?。,;]", text)
sentences = re.split(r"['.','!','?','。',',',';','...']", text)
sentences = [sent.strip(" ") for sent in sentences]
return sentences
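# Illustrative behaviour (with the delimiters above):
#   textToSentence('He was born in 1998. Now 22 years old')
#   -> ['He was born in 1998', 'Now 22 years old']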
def XtextToSentence(text, fine=False):
if not fine:
punctuation = re.compile(r"([^\d+])(\.|!|\?|;|\n|。|!|?|;|…| |!|؟|؛)+")
else:
punctuation = re.compile(r"([^\d+])(\.|!|\?|;|\n|。|!|,|,|?|;|…| |!|؟|؛)+")
# 發生 and year but not same comma
lines = []
lines = punctuation.sub(r"\1\2<pad>", text)
lines = [line.strip() for line in lines.split("<pad>") if line.strip()]
#print('--------------- show senetence -------------------')
"""
for line in lines:
print('---line---',line)
print('--------------- show senetence done -------------------')
"""
return lines
def readFileAsList(filename):
f = open(filename, "r")
dictdata = f.read().splitlines()
f.close()
return dictdata
def readFileAsListWFirst(filename, sep=','):
datalist=readFileAsList(filename)
dlist = []
for d in datalist:
dlist.append(d.split(sep)[0].strip())
return dlist
def dedupeList(alllist):
delist = list(set(alllist))
return delist
def storeFilebyList(mm, storefile='keywordlist.txt'):
#storefile='keywordlist.txt'
ff = open(storefile, "w")
for sm in mm:
ff.write(sm)
ff.write('\n')
ff.close()
def storeFilebyDict(mdocs):
for m in mdocs:
storefile=m.replace('data','datam')
ff = open(storefile, "w")
mm = mdocs[m]
for sm in mm:
ff.write(sm)
ff.write('\n')
ff.close()
def randomFile():
file_name = ''.join(random.choice(string.ascii_lowercase) for i in range(16))
#print(file_name)
return file_name
def clenanSentence(s):
sentence = ''.join(s.split())
return sentence
```
#### File: cner/scripts/visualize_model.py
```python
import spacy_streamlit
import typer
def main(models: str, default_text: str):
models = [name.strip() for name in models.split(",")]
spacy_streamlit.visualize(models, default_text, visualizers=["ner"])
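# Illustrative invocation (model path and default text are hypothetical); the
# script is meant to be launched through Streamlit, which hands the arguments
# on to typer (exact argument passing depends on the Streamlit version):
#   streamlit run visualize_model.py -- ./training/model-best "Some default text"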
if __name__ == "__main__":
try:
typer.run(main)
except SystemExit:
pass
``` |
{
"source": "jonahx/bash-lib",
"score": 3
} |
#### File: test-utils/tap2junit/tap2junit.py
```python
import os
import re
import sys
from junit_xml import TestSuite, TestCase
class Tap2JUnit:
""" This class reads a subset of TAP (Test Anything protocol)
and writes JUnit XML.
Two line formats are read:
1. (not )?ok testnum testname
2. # diagnostic output
1. Starts a new test result.
2. Adds diagnostic information to the last read result
Any 2. lines found before a 1. line are ignored.
Any lines not matching either pattern are ignored.
This script was written because none of the tap2junit converters
I could find inserted the failure output into the junit correctly.
And IMO a failed test with no indication of why is useless.
"""
def __init__(self, test_suite, test_class):
self.test_suite = test_suite
self.test_class = test_class
# This Regex matches a (not) ok testnum testname line from the
# TAP specification, using named capture groups
self.result_re = re.compile(
r"^(?P<result>not )?ok\s*(?P<testnum>[0-9])+\s*(?P<testname>.*)$")
self.comment_re = re.compile(r"^\s*#")
self.case = None
self.cases = []
def process_line(self, line):
""" This funuction reads a tap stream line by line
and groups the diagnostic output with the relevant
result in a dictionary.
Outputs a list of dicts, one for each result
"""
match = self.result_re.match(line)
if match:
# This line starts a new test result
self.case = match.groupdict()
self.case['stderr'] = []
self.cases.append(self.case)
return
match = self.comment_re.match(line)
if match and self.case:
# This line contains diagnostic
# output from a failed test
self.case['stderr'].append(re.sub(r'^\s*#', '', line).rstrip())
def convert(self, infile=sys.stdin, out=sys.stdout):
""" Reads a subset of TAP and writes JUnit XML """
# read lines
for line in infile.readlines():
self.process_line(line)
# Convert line dicts to test case objects
case_objs = []
for case in self.cases:
case_obj = TestCase(case['testname'], self.test_class, 0, '', '')
if case['result'] == 'not ':
case_obj.add_failure_info(output="\n".join(case['stderr']))
case_objs.append(case_obj)
# Combine test cases into a suite
suite = TestSuite(self.test_suite, case_objs)
# Write the suite out as XML
TestSuite.to_file(out, [suite])
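# Minimal sketch of a conversion (illustrative TAP input):
#
#   import io
#   t2j = Tap2JUnit('suite', 'class')
#   tap = "ok 1 passes\nnot ok 2 fails\n# expected 4, got 5\n"
#   t2j.convert(infile=io.StringIO(tap), out=io.StringIO())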
def main():
t2j = Tap2JUnit(
os.environ.get('JUNIT_TEST_SUITE', 'tap2junit'),
os.environ.get('JUNIT_TEST_CLASS', 'tap2junit')
)
t2j.convert()
if __name__ == "__main__":
main()
``` |
{
"source": "JonahY/AE_GUI",
"score": 2
} |
#### File: JonahY/AE_GUI/about.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
import resource
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(483, 150)
Dialog.setMinimumSize(QtCore.QSize(483, 150))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/logo.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setStyleSheet("#information {background-color: transparent;border:none;border-style:outset;color: rgb(48, 0, 72);font: 75 10pt \"Verdana\";}")
self.gridLayout = QtWidgets.QGridLayout(Dialog)
self.gridLayout.setObjectName("gridLayout")
self.about = QtWidgets.QGroupBox(Dialog)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.about.sizePolicy().hasHeightForWidth())
self.about.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily("Times New Roman")
font.setPointSize(12)
font.setBold(True)
font.setItalic(True)
font.setWeight(75)
self.about.setFont(font)
self.about.setObjectName("about")
self.gridLayout_2 = QtWidgets.QGridLayout(self.about)
self.gridLayout_2.setObjectName("gridLayout_2")
self.information = QtWidgets.QTextEdit(self.about)
self.information.setReadOnly(True)
self.information.setObjectName("information")
self.gridLayout_2.addWidget(self.information, 0, 0, 1, 1)
self.label = QtWidgets.QLabel(self.about)
self.label.setGeometry(QtCore.QRect(325, 20, 54, 50))
self.label.setText("")
self.label.setPixmap(QtGui.QPixmap("../test/XJTU.gif"))
self.label.setScaledContents(True)
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.about)
self.label_2.setGeometry(QtCore.QRect(380, 20, 40, 50))
self.label_2.setText("")
self.label_2.setPixmap(QtGui.QPixmap("../test/MSE.gif"))
self.label_2.setScaledContents(True)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.about, 0, 0, 1, 1)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "About information"))
self.about.setTitle(_translate("Dialog", "About"))
self.information.setHtml(_translate("Dialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Verdana\'; font-size:10pt; font-weight:72; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
```
#### File: JonahY/AE_GUI/check_license.py
```python
from get_mac_addr import get_mac_address, hash_msg
from datetime import datetime
class CheckLicense():
"""
Check user's license.
"""
def __init__(self):
pass
def check_psw(self, psw):
"""
check encoded password in user's license.
:param psw: str, encoded password.
:return: boolean, check result.
"""
mac_addr = get_mac_address()
hashed_msg = hash_msg('faw' + str(mac_addr))
if psw == hashed_msg:
return True
else:
return False
def check_date(self, lic_date):
"""
check datetime in user's license.
:param lic_date: str, license datetime.
        :return: boolean, False if the license end date is not later than the current time.
"""
current_time = datetime.now().isoformat() # get current time which is iso format.
current_time_array = datetime.strptime(current_time, "%Y-%m-%dT%H:%M:%S.%f") # switch the str datetime to array.
lic_date_array = datetime.strptime(lic_date, "%Y-%m-%dT%H:%M:%S") # the array type is datetime.datetime.
remain_days = lic_date_array - current_time_array
remain_days = remain_days.days
if remain_days < 0 or remain_days == 0:
return False
else:
return True
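    # e.g. (illustrative): check_date('2099-01-01T00:00:00') stays True until
    # that date passes; an already-expired date returns False.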
def check_update(self, longest, lic_date):
"""
        check whether a new license extends the current authorization.
        :param longest: str, latest license end datetime seen so far.
        :param lic_date: str, end datetime of the new license.
        :return: boolean, False if the new license date is not later than the current one.
"""
longest_time_array = datetime.strptime(longest, "%Y-%m-%dT%H:%M:%S") # switch the str datetime to array.
lic_date_array = datetime.strptime(lic_date, "%Y-%m-%dT%H:%M:%S") # the array type is datetime.datetime.
remain_days = lic_date_array - longest_time_array
remain_days = remain_days.days
if remain_days < 0 or remain_days == 0:
return False
else:
return True
def get_authorization_days(self):
"""
active datetime by user in first time.
:return: str, current datetime.
"""
active_date = datetime.now().isoformat(sep=' ') # current time, the separator is space.
return active_date
if __name__ == '__main__':
# # current_time = datetime.now().isoformat('T') # https://www.cnblogs.com/yyds/p/6369211.html
# # print(current_time)
# # current_time = datetime.strptime('2017/02/04 20:49', '%Y/%m/%d %H:%M')
# time1 = '2020-05-27T10:43:12.400947'
# timeArray = datetime.strptime(time1, "%Y-%m-%dT%H:%M:%S.%f")
# # timeArray = datetime.fromtimestamp(time1)
# print(timeArray)
# print(type(timeArray))
# time2 = '2018-05-27T10:43:12.400947'
# timeArray_2 = datetime.strptime(time2, "%Y-%m-%dT%H:%M:%S.%f")
# d_delta = timeArray_2 - timeArray
# print(d_delta)
# print(d_delta.days)
# # day_time = datetime(timeArray)
# # print(day_time)
# # dt = datetime.fromtimestamp(timeArray)
# # print(dt)
# # d1 = datetime(timeArray)
# # print(d1)
#
# # d2 = datetime('')
# # day_delta = d1 - d2
# # print(day_delta)
time1 = '2022-05-27T10:43:12.400947'
check_date_result = CheckLicense().check_date(time1)
print(check_date_result)
pass
# mac_addr = get_mac_address()
# hash_result = hash_msg(mac_addr)
# print(hash_result)
# check_lic = CheckLicense()
# check_state = check_lic.check_psw(hash_result)
# print(check_state)
```
#### File: JonahY/AE_GUI/preprocess.py
```python
"""
@version: 2.0
@author: Jonah
@file: preprocess.py
@time: 2021/11/10 12:56
"""
import os
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
import math
import multiprocessing
import argparse
import time
from multiprocessing import cpu_count
import sys
from scipy.fftpack import fft
import csv
from plot_format import plot_norm
from kmeans import KernelKMeans, ICA
from utils import *
from wave_freq import *
from features import *
import warnings
from matplotlib.pylab import mpl
from PyQt5.QtWidgets import QApplication  # used by main_read_pac_data below
# os.getcwd()
np.seterr(invalid='ignore')
class Preprocessing:
def __init__(self, idx, thr_dB, magnification_dB, data_path, processor):
self.idx = idx
self.thr_dB = thr_dB
self.magnification_dB = magnification_dB
self.thr_V = pow(10, self.thr_dB / 20) / pow(10, 6)
self.counts = 0
self.duration = 0
self.amplitude = 0
self.rise_time = 0
self.energy = 0
self.RMS = 0
self.hit_num = 0
self.time = 0
self.channel_num = 0
self.sample_interval = 0
self.freq_max = 0
self.magnification = pow(10, self.magnification_dB / 20)
self.data_path = data_path
self.processor = processor
def skip_n_column(self, file, n=3):
for _ in range(n):
file.readline()
def cal_features(self, dataset, time_label, valid_wave_idx):
start = time_label[valid_wave_idx[0]]
end = time_label[valid_wave_idx[-1]]
self.duration = end - start
max_idx = np.argmax(abs(dataset))
self.amplitude = abs(dataset[max_idx])
self.rise_time = time_label[max_idx] - start
valid_data = dataset[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
self.energy = np.sum(np.multiply(pow(valid_data, 2), self.sample_interval))
self.RMS = math.sqrt(self.energy / self.duration)
return valid_data
def cal_counts(self, valid_data):
self.counts = 0
N = len(valid_data)
for idx in range(1, N):
if valid_data[idx - 1] <= self.thr_V <= valid_data[idx]:
self.counts += 1
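    # e.g. with thr_V = 1.0, the sequence [0.2, 1.3, 0.5, 1.1] crosses the
    # threshold upwards twice, so counts == 2 (illustrative numbers)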
def cal_freq(self, valid_data, valid_wave_idx):
Fs = 1 / self.sample_interval
N = valid_wave_idx[-1] - valid_wave_idx[0] + 1
frq = (np.arange(N) / N) * Fs
fft_y = fft(valid_data)
abs_y = np.abs(fft_y) / N
half_frq = frq[range(int(N / 2))]
abs_y_half = abs_y[range(int(N / 2))]
abs_y_half[0] = 0
self.freq_max = half_frq[np.argmax(abs_y_half)]
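    # Sketch of the spectrum math above (illustrative numbers): with
    # sample_interval = 1e-7 s, Fs = 10 MHz; a 4000-point hit then has a
    # frequency resolution of Fs / N = 2.5 kHz, and freq_max is the bin of
    # the single-sided spectrum with the largest magnitude (DC bin zeroed)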
def save_features(self, result):
valid, tra_1, tra_2, tra_3, tra_4 = [], [], [], [], []
txt_name = self.data_path.split('/')[-1] + '.txt'
f = open(txt_name, "w")
f.write("ID, Time(s), Chan, Thr(μV), Thr(dB), Amp(μV), Amp(dB), "
"RiseT(s), Dur(s), Eny(aJ), RMS(μV), Frequency(Hz), Counts\n")
pbar = tqdm(result, ncols=100)
for idx, i in enumerate(pbar):
tmp, tmp_tra_1, tmp_tra_2, tmp_tra_3, tmp_tra_4 = i.get()
valid += tmp
tra_1.append(tmp_tra_1)
tra_2.append(tmp_tra_2)
tra_3.append(tmp_tra_3)
tra_4.append(tmp_tra_4)
pbar.set_description("Exporting Data: {}/{}".format(idx + 1, self.processor))
valid = sorted(valid, key=lambda s: float(s.split(',')[0]))
for i in valid:
f.write(i)
f.close()
# print(valid_data)
return valid, tra_1, tra_2, tra_3, tra_4
    def main(self, file_name, data=None, tra_1=None, tra_2=None, tra_3=None, tra_4=None, min_cnts=2):
        # avoid the shared-mutable-default-argument pitfall
        data = data if data is not None else []
        tra_1, tra_2, tra_3, tra_4 = (x if x is not None else [] for x in (tra_1, tra_2, tra_3, tra_4))
pbar = tqdm(file_name, ncols=100)
for name in pbar:
with open(name, "r") as f:
self.skip_n_column(f)
self.sample_interval = float(f.readline()[29:])
self.skip_n_column(f)
points_num = int(f.readline()[36:])
self.channel_num = int(f.readline().strip()[16:])
self.hit_num = int(f.readline()[12:])
self.time = float(f.readline()[14:])
dataset = np.array([float(i.strip("\n")) for i in f.readlines()[1:]]) / self.magnification
time_label = np.linspace(self.time, self.time + self.sample_interval * (points_num - 1), points_num)
# calculate the duration, amplitude, rise_time, energy and counts
valid_wave_idx = np.where(abs(dataset) >= self.thr_V)[0]
# print(dataset[0], dataset[-1], len(dataset))
# print(valid_wave_idx, valid_wave_idx.shape)
if self.channel_num == 1:
tra_1.append([self.time, self.channel_num, self.sample_interval, points_num, dataset*pow(10, 6), self.hit_num])
elif self.channel_num == 2:
tra_2.append([self.time, self.channel_num, self.sample_interval, points_num, dataset*pow(10, 6), self.hit_num])
elif self.channel_num == 3:
tra_3.append([self.time, self.channel_num, self.sample_interval, points_num, dataset*pow(10, 6), self.hit_num])
elif self.channel_num == 4:
tra_4.append([self.time, self.channel_num, self.sample_interval, points_num, dataset*pow(10, 6), self.hit_num])
if valid_wave_idx.shape[0] > 1:
valid_data = self.cal_features(dataset, time_label, valid_wave_idx)
self.cal_counts(valid_data)
if self.counts > min_cnts:
self.cal_freq(valid_data, valid_wave_idx)
tmp_feature = '{}, {:.7f}, {}, {:.8f}, {:.1f}, {:.8f}, {:.1f}, {:.7f}, {:.7f}, {:.8f}, {:.8f}' \
', {:.8f}, {}\n'.format(self.hit_num, self.time, self.channel_num,
self.thr_V * pow(10, 6), self.thr_dB,
self.amplitude * pow(10, 6),
20 * np.log10(self.amplitude * pow(10, 6)),
self.rise_time, self.duration, self.energy * pow(10, 14),
self.RMS * pow(10, 6), self.freq_max, self.counts)
data.append(tmp_feature)
pbar.set_description("Process: %s | Calculating: %s" % (self.idx, name.split('_')[2]))
        # ID, Time(s), Chan, Thr(μV), Thr(dB), Amp(μV), Amp(dB), RiseT(s), Dur(s), Eny(aJ), RMS(μV), Counts, Frequency(Hz)
# print("-" * 50)
# print(self.hit_num, self.time * pow(10, 6), self.channel_num, self.thr_V * pow(10, 6),
# self.amplitude * pow(10, 6), self.rise_time * pow(10, 6), self.duration * pow(10, 6),
# self.energy * pow(10, 14), self.RMS * pow(10, 6), self.counts)
return data, tra_1, tra_2, tra_3, tra_4
    def read_pac_data(self, file_name, tra_1=None, tra_2=None, tra_3=None, tra_4=None):
        # avoid the shared-mutable-default-argument pitfall
        tra_1, tra_2, tra_3, tra_4 = (x if x is not None else [] for x in (tra_1, tra_2, tra_3, tra_4))
pbar = tqdm(file_name, ncols=100)
for name in pbar:
with open(name, "r") as f:
self.skip_n_column(f)
self.sample_interval = float(f.readline()[29:])
self.skip_n_column(f)
points_num = int(f.readline()[36:])
self.channel_num = int(f.readline().strip()[16:])
self.hit_num = int(f.readline()[12:])
self.time = float(f.readline()[14:])
dataset = np.array([float(i.strip("\n")) for i in f.readlines()[1:]]) / self.magnification * pow(10, 6)
if self.channel_num == 1:
tra_1.append([self.time, self.channel_num, self.sample_interval, points_num, dataset, self.hit_num])
elif self.channel_num == 2:
tra_2.append([self.time, self.channel_num, self.sample_interval, points_num, dataset, self.hit_num])
elif self.channel_num == 3:
tra_3.append([self.time, self.channel_num, self.sample_interval, points_num, dataset, self.hit_num])
elif self.channel_num == 4:
tra_4.append([self.time, self.channel_num, self.sample_interval, points_num, dataset, self.hit_num])
pbar.set_description("Process: %s | Calculating: %s" % (self.idx, name.split('_')[2]))
return tra_1, tra_2, tra_3, tra_4
def read_pac_features(self, res, min_cnts=2):
pri, chan_1, chan_2, chan_3, chan_4 = [], [], [], [], []
pbar = tqdm(res, ncols=100)
for i in pbar:
tmp = []
ls = i.strip("\n").split(', ')
if int(ls[-1]) > min_cnts:
for r, j in zip([0, 7, 0, 8, 1, 8, 1, 7, 7, 8, 8, 8, 0], ls):
tmp.append(int(j) if r == 0 else round(float(j), r))
pri.append(tmp)
if int(ls[2]) == 1:
chan_1.append(tmp)
elif int(ls[2]) == 2:
chan_2.append(tmp)
elif int(ls[2]) == 3:
chan_3.append(tmp)
elif int(ls[2]) == 4:
chan_4.append(tmp)
pbar.set_description("Process: %s | Calculating: %s" % (self.idx, ls[0]))
return pri, chan_1, chan_2, chan_3, chan_4
def convert_pac_data(file_list, data_path, processor, threshold_dB, magnification_dB):
# check existing file
tar = data_path.split('/')[-1] + '.txt'
if tar in file_list:
print("=" * 46 + " Warning " + "=" * 45)
while True:
ans = input("The exported data file has been detected. Do you want to overwrite it: (Enter 'yes' or 'no') ")
if ans.strip() == 'yes':
os.remove(tar)
break
elif ans.strip() == 'no':
sys.exit(0)
print("Please enter 'yes' or 'no' to continue!")
file_list = os.listdir(data_path)
each_core = int(math.ceil(len(file_list) / float(processor)))
result, data_tra, tmp_all = [], [], []
print("=" * 47 + " Start " + "=" * 46)
start = time.time()
# Multiprocessing acceleration
pool = multiprocessing.Pool(processes=processor)
for idx, i in enumerate(range(0, len(file_list), each_core)):
process = Preprocessing(idx, threshold_dB, magnification_dB, data_path, processor)
result.append(pool.apply_async(process.main, (file_list[i:i + each_core],)))
pri, tra_1, tra_2, tra_3, tra_4 = process.save_features(result)
pool.close()
pool.join()
for idx, tra in enumerate([tra_1, tra_2, tra_3, tra_4]):
tra = [j for i in tra for j in i]
try:
data_tra.append(sorted(tra, key=lambda x: x[-1]))
except IndexError:
data_tra.append([])
print('Warning: There is no data in channel %d!' % idx)
pri = np.array([np.array(i.strip('\n').split(', ')).astype(np.float32) for i in pri])
chan_1 = pri[np.where(pri[:, 2] == 1)[0]]
chan_2 = pri[np.where(pri[:, 2] == 2)[0]]
chan_3 = pri[np.where(pri[:, 2] == 3)[0]]
chan_4 = pri[np.where(pri[:, 2] == 4)[0]]
end = time.time()
print("=" * 46 + " Report " + "=" * 46)
print("Calculation Info--Quantity of valid data: %s" % pri.shape[0])
print("Waveform Info--Channel 1: %d | Channel 2: %d | Channel 3: %d | Channel 4: %d" %
(len(data_tra[0]), len(data_tra[1]), len(data_tra[2]), len(data_tra[3])))
print("Features Info--All channel: %d | Channel 1: %d | Channel 2: %d | Channel 3: %d | Channel 4: %d" %
(pri.shape[0], chan_1.shape[0], chan_2.shape[0], chan_3.shape[0], chan_4.shape[0]))
print("Finishing time: {} | Time consumption: {:.3f} min".format(time.asctime(time.localtime(time.time())),
(end - start) / 60))
return data_tra[0], data_tra[1], data_tra[2], data_tra[3], pri, chan_1, chan_2, chan_3, chan_4
def main_read_pac_data(file_list, data_path, processor, threshold_dB, magnification_dB):
# check existing file
tar = data_path.split('/')[-1] + '.txt'
if tar in file_list:
exist_idx = np.where(np.array(file_list) == tar)[0][0]
file_list = file_list[0:exist_idx] + file_list[exist_idx+1:]
each_core = int(math.ceil(len(file_list) / float(processor)))
result, tra_1, tra_2, tra_3, tra_4 = [], [], [], [], []
data_tra = []
print("=" * 47 + " Start " + "=" * 46)
start = time.time()
# Multiprocessing acceleration
pool = multiprocessing.Pool(processes=processor)
for idx, i in enumerate(range(0, len(file_list), each_core)):
process = Preprocessing(idx, threshold_dB, magnification_dB, data_path, processor)
result.append(pool.apply_async(process.read_pac_data, (file_list[i:i + each_core],)))
QApplication.processEvents()
pbar = tqdm(result, ncols=100)
for idx, i in enumerate(pbar):
tmp_1, tmp_2, tmp_3, tmp_4 = i.get()
tra_1.append(tmp_1)
tra_2.append(tmp_2)
tra_3.append(tmp_3)
tra_4.append(tmp_4)
pbar.set_description("Exporting Data: {}/{}".format(idx + 1, processor))
pool.close()
pool.join()
for idx, tra in enumerate([tra_1, tra_2, tra_3, tra_4]):
tra = [j for i in tra for j in i]
try:
data_tra.append(sorted(tra, key=lambda x: x[-1]))
except IndexError:
data_tra.append([])
print('Warning: There is no data in channel %d!' % idx)
end = time.time()
print("=" * 46 + " Report " + "=" * 46)
print("Channel 1: %d | Channel 2: %d | Channel 3: %d | Channel 4: %d" %
(len(data_tra[0]), len(data_tra[1]), len(data_tra[2]), len(data_tra[3])))
print("Finishing time: {} | Time consumption: {:.3f} min".format(time.asctime(time.localtime(time.time())),
(end - start) / 60))
return data_tra[0], data_tra[1], data_tra[2], data_tra[3]
def main_read_pac_features(data_path):
dir_features = data_path.split('/')[-1] + '.txt'
with open(dir_features, 'r') as f:
res = [i.strip("\n").strip(',') for i in f.readlines()[1:]]
print("=" * 47 + " Start " + "=" * 46)
start = time.time()
pri = np.array([np.array(i.strip('\n').split(', ')).astype(np.float32) for i in res])
chan_1 = pri[np.where(pri[:, 2] == 1)[0]]
chan_2 = pri[np.where(pri[:, 2] == 2)[0]]
chan_3 = pri[np.where(pri[:, 2] == 3)[0]]
chan_4 = pri[np.where(pri[:, 2] == 4)[0]]
end = time.time()
print("=" * 46 + " Report " + "=" * 46)
print("All channel: %d | Channel 1: %d | Channel 2: %d | Channel 3: %d | Channel 4: %d" %
(pri.shape[0], chan_1.shape[0], chan_2.shape[0], chan_3.shape[0], chan_4.shape[0]))
print("Finishing time: {} | Time consumption: {:.3f} min".format(time.asctime(time.localtime(time.time())),
(end - start) / 60))
return pri, chan_1, chan_2, chan_3, chan_4
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-path", "--data_path", type=str,
default=r"D:\data\3D porous TC4-8mA-compression test-z1-0.01-20201010",
help="Absolute path of data(add 'r' in front)")
parser.add_argument("-thr", "--threshold_dB", type=int, default=25, help="Detection threshold")
parser.add_argument("-mag", "--magnification_dB", type=int, default=60, help="Magnification /dB")
parser.add_argument("-cpu", "--processor", type=int, default=cpu_count(), help="Number of Threads")
parser.add_argument("-cnts", "--min_cnts", type=int, default=2, help="Number of Threads")
opt = parser.parse_args()
print("=" * 44 + " Parameters " + "=" * 44)
print(opt)
opt.data_path = opt.data_path.replace('\\', '/')
os.chdir(opt.data_path)
file_list = os.listdir(opt.data_path)
# print(file_list)
data_tra_1, data_tra_2, data_tra_3, data_tra_4, data_pri, chan_1, chan_2, chan_3, chan_4 = convert_pac_data(file_list, opt.data_path, opt.processor, opt.threshold_dB, opt.magnification_dB)
# data_tra_1, data_tra_2, data_tra_3, data_tra_4 = main_read_pac_data(file_list, opt.data_path, opt.processor, opt.threshold_dB, opt.magnification_dB)
# data_pri, chan_1, chan_2, chan_3, chan_4 = main_read_pac_features(opt.data_path, opt.processor, opt.threshold_dB, opt.magnification_dB, 6)
# chan = chan_1
# Time, Amp, RiseT, Dur, Eny, RMS, Counts = chan[:, 1], chan[:, 5], chan[:, 7] * pow(10, 6), chan[:, 8] * pow(10, 6), \
# chan[:, 9], chan[:, 10], chan[:, -1]
# feature_idx = [Amp, Dur, Eny]
# xlabelz = ['Amplitude (μV)', 'Duration (μs)', 'Energy (aJ)']
# ylabelz = ['PDF(A)', 'PDF(D)', 'PDF(E)']
# color_1 = [255 / 255, 0 / 255, 102 / 255] # red
# color_2 = [0 / 255, 136 / 255, 204 / 255] # blue
# status = 'test'
# features = Features(color_1, color_2, Time, feature_idx, status)
# features.plot_correlation(Amp, Eny, xlabelz[0], xlabelz[2])
# features.plot_correlation(Dur, Amp, xlabelz[1], xlabelz[0])
# features.plot_correlation(Dur, Eny, xlabelz[1], xlabelz[2])
# waveform = Waveform(color_1, color_2, data_tra_1, opt.path, 'test', status, 'pac', 24)
```
#### File: JonahY/AE_GUI/Visualization.py
```python
import sys, os
if hasattr(sys, 'frozen'):
os.environ['PATH'] = sys._MEIPASS + ";" + os.environ['PATH']
import sys
import os
from PyQt5 import QtWidgets, Qt
from authorization import Ui_Dialog
from alone_auth import AuthorizeWindow
from about_info import AboutWindow
from get_mac_addr import get_mac_address
from check_license import CheckLicense
from AEScoder import PrpCrypt
from Controller import MainForm
from multiprocessing import freeze_support
def app_path():
"""Returns the base application path."""
if hasattr(sys, 'frozen'):
# Handles PyInstaller
        return os.path.dirname(sys.executable)  # exe directory after PyInstaller packaging
    return os.path.dirname(__file__)  # .py directory before packaging
PROJECT_PATH = app_path()
class AuthWindow(QtWidgets.QDialog, Ui_Dialog):
def __init__(self, win_main):
super(AuthWindow, self).__init__()
self.setupUi(self)
self.setWindowFlags(Qt.Qt.WindowMinimizeButtonHint | Qt.Qt.WindowCloseButtonHint)
# self.setFixedSize(self.width(), self.height())
self.win_main = win_main
self.active_time = ''
self.psw = ''
self.abort.clicked.connect(self.close)
self.show_license.clicked.connect(self.get_license)
self.Read_license()
self.init_UI()
self.activate.clicked.connect(self.init_UI_2)
self.enter.clicked.connect(self.OPEN)
self.information.setText('This software is owned by Yuan.\nPlease send an email to apply for a license.\n'
'E-mail addresses: <EMAIL>\nInstitution: State Key Laboratory for '
'Mechanical Behavior of Materials, Xi’an Jiaotong University, Xi’an 710049, China')
def init_UI(self):
self.setWindowFlags(Qt.Qt.CustomizeWindowHint)
a = get_mac_address()
self.mac.setText(a)
self.init_UI_2()
def init_UI_2(self):
check_state = self.check_license_state()
if check_state == False:
self.license_file.setEnabled(True)
self.activate.setEnabled(True)
else:
self.license_file.setEnabled(False)
self.activate.setEnabled(False)
self.enter.setEnabled(True)
def check_license_state(self):
if self.active_time and self.psw:
check_time_result = CheckLicense().check_date(self.active_time)
check_psw_result = CheckLicense().check_psw(self.psw)
date_time = str(self.active_time).replace('T', ' ')
if check_psw_result:
if check_time_result:
self.activate_status.setText(f'Activation is successful!\nAuthorization ends in {date_time}.')
return True
else:
self.activate_status.setText(f'Activation code has expired!\nAuthorization ends in {date_time}.')
elif self.active_time == self.psw == False:
self.activate_status.setText(f'Activation is failed!\nPlease change license.')
else:
self.activate_status.setText(f'Not activated!\nPlease activate the software.')
return False
def get_license(self):
        license_file_path, _ = QtWidgets.QFileDialog.getOpenFileName(self, "Select license", "C:/", "All Files (*.lic);;Text Files (*.txt)")  # file-extension filters, separated by double semicolons
filename = license_file_path.split('/')[-1]
if filename:
with open(license_file_path, 'r') as license_file:
x = license_file.readline()
files = self.get_license_files()
if files:
for file in files:
os.remove(PROJECT_PATH + '/lic/' + file)
license_file_path_new = PROJECT_PATH + '/lic/'+filename
with open(license_file_path_new, 'w') as f:
f.write(x)
f.close()
self.Read_license()
pass
def get_license_files(self):
path = PROJECT_PATH + '/lic'
files = os.listdir(path)
return files
def Read_license(self):
path = PROJECT_PATH + '/lic'
files = self.get_license_files()
if files:
filename = files[0]
self.license_file.setText(filename)
with open(path + '/' + filename, 'r', encoding='utf-8') as f:
lic_msg = f.read()
f.close()
# str to bytes
lic_msg = bytes(lic_msg, encoding="utf8")
            pc = PrpCrypt('XJTU_MSE_MBM_714_753_yBc')  # initialize the cipher with its key
            license_str = pc.decrypt(lic_msg)  # decrypt
if license_str:
license_dic = eval(license_str)
mac = license_dic['mac']
self.active_time = license_dic['time_str']
self.psw = license_dic['psw']
else:
self.active_time = False
self.psw = False
else:
self.license_file.setEnabled(True)
self.show_license.setEnabled(True)
def OPEN(self):
self.close()
self.win_main.show()
if __name__ == "__main__":
freeze_support()
app = QtWidgets.QApplication(sys.argv)
win_auth = AuthorizeWindow()
win_about = AboutWindow()
win_main = MainForm(win_auth, win_about)
win = AuthWindow(win_main)
win.show()
sys.exit(app.exec_())
``` |
{
"source": "JonahY/AE_NN_Cls",
"score": 2
} |
#### File: AE_NN_Cls/1/cls.py
```python
import os
import pandas as pd
import numpy as np
import math
import time
import argparse
import torch
from sklearn.model_selection import StratifiedKFold, train_test_split
from sklearn.preprocessing import StandardScaler
from torch.utils.data import DataLoader, Dataset
import torch.nn.functional as F
from torch.nn import Module
from torch import optim
import tqdm
from torch.utils.tensorboard import SummaryWriter
import datetime
from multiprocessing import cpu_count
from solver import Solver
from meter import Meter
from network import Classify_model
from dataset import classify_provider
import warnings
warnings.filterwarnings('ignore', category=FutureWarning)
class TrainVal():
def __init__(self, config, fold):
self.layer = config.layer
self.method = config.method
self.model = Classify_model(self.layer, self.method, training=True)
# if torch.cuda.is_available():
# self.model = torch.nn.DataParallel(self.model)
# self.model = self.model.cuda()
self.lr = config.lr
self.weight_decay = config.weight_decay
self.epoch = config.epoch
self.fold = fold
self.max_accuracy_valid = 0
self.solver = Solver(self.model, self.method)
# self.criterion = torch.nn.MSELoss()
self.criterion = torch.nn.CrossEntropyLoss()
        # initialize TensorBoard
        TIMESTAMP = "{0:%Y-%m-%dT%H-%M-%S}-fold{1}-classify".format(datetime.datetime.now(), fold)
self.model_path = os.path.join(config.save_path, self.method, TIMESTAMP)
self.writer = SummaryWriter(log_dir=os.path.join(self.model_path, TIMESTAMP))
# if not os.path.exists(self.model_path):
# os.makedirs(self.model_path)
def train(self, train_loader, test_loader):
# optimizer = optim.SGD(self.model.parameters(), self.lr, weight_decay=self.weight_decay)
optimizer = optim.Adam(self.model.parameters(), self.lr, weight_decay=self.weight_decay)
lr_scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, self.epoch + 10)
global_step = 0
for epoch in range(self.epoch):
epoch += 1
epoch_loss = 0
self.model.train(True)
tbar = tqdm.tqdm(train_loader, ncols=100)
for i, (x, labels) in enumerate(tbar):
labels_predict = self.solver.forward(x)
labels_predict = torch.sigmoid(labels_predict)
loss = self.solver.cal_loss(labels, labels_predict, self.criterion).float()
epoch_loss += loss.item()
self.solver.backword(optimizer, loss)
params_groups_lr = str()
for group_ind, param_group in enumerate(optimizer.param_groups):
params_groups_lr = params_groups_lr + 'params_group_%d' % (group_ind) + ': %.12f, ' % (
param_group['lr'])
descript = "Fold: %d, Train Loss: %.7f, lr: %s" % (self.fold, loss.item(), params_groups_lr)
tbar.set_description(desc=descript)
lr_scheduler.step()
global_step += len(train_loader)
print('Finish Epoch [%d/%d], Average Loss: %.7f' % (epoch, self.epoch, epoch_loss / len(tbar)))
class_neg_accuracy, class_pos_accuracy, class_accuracy, neg_accuracy, pos_accuracy, accuracy, loss_valid = \
self.validation(test_loader)
if accuracy > self.max_accuracy_valid:
is_best = True
self.max_accuracy_valid = accuracy
else:
is_best = False
# state = {
# 'epoch': epoch,
# 'state_dict': self.model.module.state_dict(),
# 'max_accuracy_valid': self.max_accuracy_valid,
# }
state = {
'epoch': epoch,
'state_dict': self.model.state_dict(),
'max_accuracy_valid': self.max_accuracy_valid,
}
self.solver.save_checkpoint(
os.path.join(self.model_path, 'classify_fold%d_%s_%f.pth' % (
self.fold, self.method, self.max_accuracy_valid)), state, is_best)
self.writer.add_scalar('train_loss', epoch_loss / len(tbar), epoch)
self.writer.add_scalar('valid_loss', loss_valid, epoch)
self.writer.add_scalar('valid_accuracy', accuracy, epoch)
self.writer.add_scalar('valid_class_0_accuracy', class_accuracy[0], epoch)
self.writer.add_scalar('valid_class_1_accuracy', class_accuracy[1], epoch)
def validation(self, test_loader):
self.model.eval()
self.model.train(False)
meter = Meter()
tbar = tqdm.tqdm(test_loader, ncols=100)
loss_sum = 0
with torch.no_grad():
for i, (x, labels) in enumerate(tbar):
labels_predict = self.solver.forward(x)
labels_predict = torch.sigmoid(labels_predict)
loss = self.solver.cal_loss(labels, labels_predict, self.criterion)
loss_sum += loss.item()
meter.update(labels, labels_predict.cpu())
descript = "Val Loss: {:.7f}".format(loss.item())
tbar.set_description(desc=descript)
loss_mean = loss_sum / len(tbar)
class_neg_accuracy, class_pos_accuracy, class_accuracy, neg_accuracy, pos_accuracy, accuracy = meter.get_metrics()
print(
"Class_0_accuracy: %0.4f | Class_1_accuracy: %0.4f | Negative accuracy: %0.4f | positive accuracy: %0.4f | accuracy: %0.4f" %
(class_accuracy[0], class_accuracy[1], neg_accuracy, pos_accuracy, accuracy))
return class_neg_accuracy, class_pos_accuracy, class_accuracy, neg_accuracy, pos_accuracy, accuracy, loss_mean
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--fold', type=str, default=r'./Ni_dislocation.csv')
parser.add_argument('--save_path', type=str, default='./checkpoints')
parser.add_argument('--num_workers', type=int, default=cpu_count())
parser.add_argument('--lr', type=float, default=0.001, help='init lr')
parser.add_argument('--weight_decay', type=float, default=0, help='weight_decay in optimizer')
parser.add_argument('--n_splits', type=int, default=1, help='n_splits_fold')
parser.add_argument('--batch_size', type=int, default=128, help='batch size')
parser.add_argument('--epoch', type=int, default=150, help='epoch')
parser.add_argument("--layer", type=int, nargs='+', default=[10, 10, 10])
parser.add_argument("--method", type=str, default='origin', help='origin, 15_select or 6_select')
config = parser.parse_args()
print(config)
dataloaders = classify_provider(config.fold, config.n_splits, config.batch_size, config.num_workers, config.method)
for fold_index, [train_loader, valid_loader, test_loader] in enumerate(dataloaders):
train_val = TrainVal(config, fold_index)
train_val.train(train_loader, test_loader)
```
#### File: AE_NN_Cls/1/dataset.py
```python
import os
import pandas as pd
import numpy as np
import math
import torch
from sklearn.model_selection import StratifiedKFold, train_test_split
from sklearn.preprocessing import StandardScaler
from torch.utils.data import DataLoader, Dataset
import torch.nn.functional as F
from torch.nn import Module
from torch import optim
import datetime
def classify_provider(fold, n_splits, batch_size, num_workers, method='15_select'):
data = pd.read_csv(fold).astype(np.float32)
feature = data.iloc[:, :-1]
label = np.array(data.iloc[:, -1].tolist()).reshape(-1, 1)
ext = np.zeros([label.shape[0], 1]).astype(np.float32)
ext[np.where(label == 0)[0]] = 1
label = np.concatenate((label, ext), axis=1)
if method == '6_select':
feature['Counts/Dur'] = feature['Counts'] / feature['Dur']
feature['RiseT/Dur'] = feature['RiseT'] / feature['Dur']
feature['Eny/Dur'] = feature['Eny'] / feature['Dur']
feature['Amp*RiseT'] = feature['Amp'] * feature['RiseT']
feature = feature[['Eny', 'Amp*RiseT', 'Dur', 'RMS', 'Counts/Dur', 'RiseT/Dur']].values
else:
feature = feature.values
# mean = np.average(feature, axis=0).reshape(1, -1)
# mean = np.repeat(mean, feature.shape[0], axis=0)
train_dfs = list()
val_dfs = list()
all_dfs = list()
if n_splits != 1:
skf = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=69)
for train_df_index, val_df_index in skf.split(feature, label[:, 0]):
train_dfs.append([feature[train_df_index], label[train_df_index, :]])
val_dfs.append([feature[val_df_index], label[val_df_index, :]])
else:
df_temp = train_test_split(feature, label, test_size=0.2, stratify=label, random_state=69)
train_dfs.append([df_temp[0], df_temp[2]])
val_dfs.append([df_temp[1], df_temp[3]])
all_dfs.append([np.concatenate((df_temp[0], df_temp[1]), axis=0),
np.concatenate((df_temp[2], df_temp[3]), axis=0)])
# print(len(train_dfs), len(val_dfs), len(all_dfs))
dataloaders = list()
for df_index, (train_df, val_df, all_df) in enumerate(zip(train_dfs, val_dfs, all_dfs)):
train_dataset = SteelClassDataset(train_df)
val_dataset = SteelClassDataset(val_df)
all_dataset = SteelClassDataset(all_df)
train_dataloader = DataLoader(train_dataset,
batch_size=batch_size,
num_workers=num_workers,
pin_memory=False,
shuffle=True)
val_dataloader = DataLoader(val_dataset,
batch_size=batch_size,
num_workers=num_workers,
pin_memory=False,
shuffle=False)
all_dataloader = DataLoader(all_dataset,
batch_size=batch_size,
num_workers=num_workers,
pin_memory=False,
shuffle=False)
dataloaders.append([train_dataloader, val_dataloader, all_dataloader])
return dataloaders
class SteelClassDataset(Dataset):
def __init__(self, dataset):
super(SteelClassDataset, self).__init__()
self.feature = dataset[0]
self.label = dataset[1]
def __getitem__(self, idx):
x = self.feature[idx]
y = self.label[idx]
return x, y
def __len__(self):
return len(self.label)
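# Illustrative use (CSV path hypothetical):
#   loaders = classify_provider('./Ni_dislocation.csv', n_splits=1, batch_size=128, num_workers=0)
#   train_loader, valid_loader, all_loader = loaders[0]
#   x, y = next(iter(train_loader))  # x: (batch, n_features), y: (batch, 2)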
```
#### File: AE_NN_Cls/1/utils.py
```python
import sqlite3
from tqdm import tqdm
import numpy as np
import array
import sys
import math
import os
import multiprocessing
import shutil
import pandas as pd
from scipy.signal import savgol_filter
class Reload:
def __init__(self, path_pri, path_tra, fold):
self.path_pri = path_pri
self.path_tra = path_tra
self.fold = fold
def sqlite_read(self, path):
"""
        Read a sqlite database file and return its stored record count.
        """
        mydb = sqlite3.connect(path)  # connect to the database
        mydb.text_factory = lambda x: str(x, 'gbk', 'ignore')
        cur = mydb.cursor()  # create a cursor to execute SQL statements
        # get the table names
        cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
        Tables = cur.fetchall()  # Tables is a list of tuples
        # fetch the stored record count from the relevant table
if path[-5:] == 'pridb':
cur.execute("SELECT * FROM {}".format(Tables[3][0]))
res = cur.fetchall()[-2][1]
elif path[-5:] == 'tradb':
cur.execute("SELECT * FROM {}".format(Tables[1][0]))
res = cur.fetchall()[-3][1]
return int(res)
def read_with_time(self, time):
conn_pri = sqlite3.connect(self.path_pri)
result_pri = conn_pri.execute(
"Select SetID, Time, Chan, Thr, Amp, RiseT, Dur, Eny, RMS, Counts, TRAI FROM view_ae_data")
chan_1, chan_2, chan_3, chan_4 = [], [], [], []
t = [[] for _ in range(len(time) - 1)]
N_pri = self.sqlite_read(self.path_pri)
for _ in tqdm(range(N_pri)):
i = result_pri.fetchone()
if i[-2] is not None and i[-2] >= 6 and i[-1] > 0:
for idx, chan in zip(np.arange(1, 5), [chan_1, chan_2, chan_3, chan_4]):
if i[2] == idx:
chan.append(i)
for j in range(len(t)):
if time[j] <= i[1] < time[j + 1]:
t[j].append(i)
break
break
chan_1 = np.array(chan_1)
chan_2 = np.array(chan_2)
chan_3 = np.array(chan_3)
chan_4 = np.array(chan_4)
return t, chan_1, chan_2, chan_3, chan_4
def read_vallen_data(self, lower=2, t_cut=float('inf'), mode='all'):
data_tra, data_pri, chan_1, chan_2, chan_3, chan_4 = [], [], [], [], [], []
if mode == 'all' or mode == 'tra only':
conn_tra = sqlite3.connect(self.path_tra)
result_tra = conn_tra.execute(
"Select Time, Chan, Thr, SampleRate, Samples, TR_mV, Data, TRAI FROM view_tr_data")
N_tra = self.sqlite_read(self.path_tra)
for _ in tqdm(range(N_tra), ncols=80):
i = result_tra.fetchone()
if i[0] > t_cut:
continue
data_tra.append(i)
if mode == 'all' or mode == 'pri only':
conn_pri = sqlite3.connect(self.path_pri)
result_pri = conn_pri.execute(
"Select SetID, Time, Chan, Thr, Amp, RiseT, Dur, Eny, RMS, Counts, TRAI FROM view_ae_data")
N_pri = self.sqlite_read(self.path_pri)
for _ in tqdm(range(N_pri), ncols=80):
i = result_pri.fetchone()
if i[0] > t_cut:
continue
if i[-2] is not None and i[-2] > lower and i[-1] > 0:
data_pri.append(i)
if i[2] == 1:
chan_1.append(i)
                elif i[2] == 2:
chan_2.append(i)
elif i[2] == 3:
chan_3.append(i)
elif i[2] == 4:
chan_4.append(i)
data_tra = sorted(data_tra, key=lambda x: x[-1])
data_pri = np.array(data_pri)
chan_1 = np.array(chan_1)
chan_2 = np.array(chan_2)
chan_3 = np.array(chan_3)
chan_4 = np.array(chan_4)
return data_tra, data_pri, chan_1, chan_2, chan_3, chan_4
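    # Illustrative use (database paths are hypothetical):
    #   reload = Reload('test.pridb', 'test.tradb', 'test')
    #   data_tra, data_pri, chan_1, chan_2, chan_3, chan_4 = reload.read_vallen_data(lower=2)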
    def read_pac_data(self, path, lower=2):
        os.chdir(path)
        dir_features = os.listdir(path)[0]
        data_tra, data_pri, chan_1, chan_2, chan_3, chan_4 = [], [], [], [], [], []
        with open(dir_features, 'r') as f:
            # skip the header line; each remaining line is one comma-separated hit
            rows = [line.strip('\n').split(', ') for line in f.readlines()[1:]]
        for row in tqdm(rows, ncols=80):
            i = [float(x) for x in row]
            if i[-2] > lower and i[-1] > 0:
                data_pri.append(i)
                if i[2] == 1:
                    chan_1.append(i)
                elif i[2] == 2:
                    chan_2.append(i)
                elif i[2] == 3:
                    chan_3.append(i)
                elif i[2] == 4:
                    chan_4.append(i)
        # this text export carries no transient records, so data_tra stays empty
        data_pri = np.array(data_pri)
        chan_1 = np.array(chan_1)
        chan_2 = np.array(chan_2)
        chan_3 = np.array(chan_3)
        chan_4 = np.array(chan_4)
        return data_tra, data_pri, chan_1, chan_2, chan_3, chan_4
def export_feature(self, t, time):
for i in range(len(time) - 1):
with open(self.fold + '-%d-%d.txt' % (time[i], time[i + 1]), 'w') as f:
f.write('SetID, TRAI, Time, Chan, Thr, Amp, RiseT, Dur, Eny, RMS, Counts\n')
# ID, Time(s), Chan, Thr(μV), Thr(dB), Amp(μV), Amp(dB), RiseT(s), Dur(s), Eny(aJ), RMS(μV), Counts, Frequency(Hz)
for i in t[i]:
f.write('{}, {}, {:.8f}, {}, {:.7f}, {:.7f}, {:.2f}, {:.2f}, {:.7f}, {:.7f}, {}\n'.format(
i[0], i[-1], i[1], i[2], i[3], i[4], i[5], i[6], i[7], i[8], i[9]))
class Export:
def __init__(self, chan, data_tra, features_path):
self.data_tra = data_tra
self.features_path = features_path
self.chan = chan
def find_idx(self):
Res = []
for i in self.data_tra:
Res.append(i[-1])
Res = np.array(Res)
return Res
def detect_folder(self):
tar = './waveform'
if not os.path.exists(tar):
os.mkdir(tar)
else:
print("=" * 46 + " Warning " + "=" * 45)
while True:
ans = input(
"The exported data file has been detected. Do you want to overwrite it: (Enter 'yes' or 'no') ")
if ans.strip() == 'yes':
shutil.rmtree(tar)
os.mkdir(tar)
break
elif ans.strip() == 'no':
sys.exit(0)
print("Please enter 'yes' or 'no' to continue!")
def export_waveform(self, chan, thread_id=0, status='normal'):
if status == 'normal':
self.detect_folder()
Res = self.find_idx()
pbar = tqdm(chan, ncols=80)
for i in pbar:
trai = i[-1]
try:
j = self.data_tra[int(trai - 1)]
except IndexError:
try:
idx = np.where(Res == trai)[0][0]
j = self.data_tra[idx]
except IndexError:
print('Error 1: TRAI:{} in Channel is not found in data_tra!'.format(trai))
continue
if j[-1] != trai:
try:
idx = np.where(Res == trai)[0][0]
j = self.data_tra[idx]
except IndexError:
print('Error 2: TRAI:{} in Channel is not found in data_tra!'.format(trai))
continue
sig = np.multiply(array.array('h', bytes(j[-2])), j[-3] * 1000)
with open('./waveform/' + self.features_path[:-4] + '_{:.0f}_{:.8f}.txt'.format(trai, j[0]), 'w') as f:
f.write('Amp(uV)\n')
for a in sig:
f.write('{}\n'.format(a))
pbar.set_description("Process: %s | Exporting: %s" % (thread_id, int(trai)))
def accelerate_export(self, N=4):
# check existing file
self.detect_folder()
# Multiprocessing acceleration
each_core = int(math.ceil(self.chan.shape[0] / float(N)))
pool = multiprocessing.Pool(processes=N)
result = []
for idx, i in enumerate(range(0, self.chan.shape[0], each_core)):
result.append(pool.apply_async(self.export_waveform, (self.chan[i:i + each_core], idx + 1, 'accelerate',)))
pool.close()
pool.join()
print('Finished export of waveforms!')
return result
def material_status(component, status):
if component == 'pure':
if status == 'random':
# 0.508, 0.729, 1.022, 1.174, 1.609
idx_select_2 = [105, 94, 95, 109, 102]
TRAI_select_2 = [4117396, 4115821, 4115822, 4117632, 4117393]
# -0.264, -0.022
idx_select_1 = [95, 60]
TRAI_select_1 = [124104, 76892]
idx_same_amp_1 = [45, 62, 39, 41, 56]
TRAI_same_amp_1 = [88835, 114468, 82239, 84019, 104771]
idx_same_amp_2 = [61, 118, 139, 91, 136]
TRAI_same_amp_2 = [74951, 168997, 4114923, 121368, 4078227]
elif component == 'electrolysis':
if status == 'random':
# 0.115, 0.275, 0.297, 0.601, 1.024
idx_select_2 = [50, 148, 51, 252, 10]
TRAI_select_2 = [3067, 11644, 3079, 28583, 1501]
# 0.303, 0.409, 0.534, 0.759, 1.026
idx_select_1 = [13, 75, 79, 72, 71]
TRAI_select_1 = [2949, 14166, 14815, 14140, 14090]
if status == 'amp':
idx_select_2 = [90, 23, 48, 50, 29]
TRAI_select_2 = [4619, 2229, 2977, 3014, 2345]
idx_select_1 = [16, 26, 87, 34, 22]
TRAI_select_1 = [3932, 7412, 16349, 9001, 6300]
elif status == 'eny':
idx_select_2 = [79, 229, 117, 285, 59]
TRAI_select_2 = [4012, 22499, 7445, 34436, 3282]
idx_select_1 = [160, 141, 57, 37, 70]
TRAI_select_1 = [26465, 23930, 11974, 9379, 13667]
return idx_select_1, idx_select_2, TRAI_select_1, TRAI_select_2
def validation(k):
# Time, Amp, RiseTime, Dur, Eny, Counts, TRAI
i = data_tra[k]
sig = np.multiply(array.array('h', bytes(i[-2])), i[-3] * 1000)
time = np.linspace(i[0], i[0] + pow(i[-5], -1) * (i[-4] - 1), i[-4])
thr = i[2]
valid_wave_idx = np.where(abs(sig) >= thr)[0]
valid_time = time[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
start = time[valid_wave_idx[0]]
end = time[valid_wave_idx[-1]]
duration = (end - start) * pow(10, 6)
max_idx = np.argmax(abs(sig))
amplitude = max(abs(sig))
rise_time = (time[max_idx] - start) * pow(10, 6)
valid_data = sig[valid_wave_idx[0]:(valid_wave_idx[-1] + 1)]
energy = np.sum(np.multiply(pow(valid_data, 2), pow(10, 6) / i[3]))
RMS = math.sqrt(energy / duration)
count, idx = 0, 1
N = len(valid_data)
for idx in range(1, N):
if valid_data[idx - 1] >= thr > valid_data[idx]:
count += 1
# while idx < N:
# if min(valid_data[idx - 1], valid_data[idx]) <= thr < max((valid_data[idx - 1], valid_data[idx])):
# count += 1
# idx += 2
# continue
# idx += 1
print(i[0], amplitude, rise_time, duration, energy / pow(10, 4), count, i[-1])
def val_TRAI(data_pri, TRAI):
# Time, Amp, RiseTime, Dur, Eny, Counts, TRAI
for i in TRAI:
vallen = data_pri[i - 1]
print('-' * 80)
print('{:.8f} {} {} {} {} {:.0f} {:.0f}'.format(vallen[1], vallen[4], vallen[5], vallen[6],
vallen[-4], vallen[-2], vallen[-1]))
validation(i - 1)
def save_E_T(Time, Eny, cls_1_KKM, cls_2_KKM, time, displace, smooth_load, strain, smooth_stress):
df_1 = pd.DataFrame({'time_pop1': Time[cls_1_KKM], 'energy_pop1': Eny[cls_1_KKM]})
df_2 = pd.DataFrame({'time_pop2': Time[cls_2_KKM], 'energy_pop2': Eny[cls_2_KKM]})
df_3 = pd.DataFrame(
{'time': time, 'displace': displace, 'load': smooth_load, 'strain': strain, 'stress': smooth_stress})
df_1.to_csv('E-T_electrolysis_pop1.csv')
df_2.to_csv('E-T_electrolysis_pop2.csv')
df_3.to_csv('E-T_electrolysis_RawData.csv')
def load_stress(path_curve):
data = pd.read_csv(path_curve, encoding='gbk').drop(index=[0]).astype('float32')
data_drop = data.drop_duplicates(['拉伸应变 (应变 1)'])
time = np.array(data_drop.iloc[:, 0])
displace = np.array(data_drop.iloc[:, 1])
load = np.array(data_drop.iloc[:, 2])
strain = np.array(data_drop.iloc[:, 3])
stress = np.array(data_drop.iloc[:, 4])
sort_idx = np.argsort(strain)
strain = strain[sort_idx]
stress = stress[sort_idx]
return time, displace, load, strain, stress
def smooth_curve(time, stress, window_length=99, polyorder=1, epoch=200, cutoff=[2500, 25000]):
y_smooth = savgol_filter(stress, window_length, polyorder, mode='nearest')
front = y_smooth
for i in range(epoch):
if i == 5:
front = y_smooth
y_smooth = savgol_filter(y_smooth, window_length, polyorder, mode='nearest')
front_idx = np.where(time < cutoff[0])[0][-1]
rest_idx = np.where(time > cutoff[1])[0][0]
res = np.concatenate((stress[:40], front[40:front_idx], y_smooth[front_idx:rest_idx], stress[rest_idx:]))
return res
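# Pipeline sketch (the csv path is an assumption; the column order follows
# load_stress above):
#   time, displace, load, strain, stress = load_stress('./stress_strain.csv')
#   smooth_stress = smooth_curve(time, stress)
# smooth_curve keeps the raw head (first 40 points) and tail untouched, a
# lightly smoothed front section below cutoff[0], and the fully smoothed middle.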
def filelist_convert(data_path, tar=None):
file_list = os.listdir(data_path)
if tar:
tar += '.txt'
else:
tar = data_path.split('/')[-1] + '.txt'
if tar in file_list:
exist_idx = np.where(np.array(file_list) == tar)[0][0]
file_list.pop(exist_idx)
file_idx = np.array([np.array(i[:-4].split('_')[1:]).astype('int64') for i in file_list])
return file_list, file_idx
``` |
{
"source": "JonahYeoh/custom_data_generator",
"score": 3
} |
#### File: custom_data_generator/activity_recognition/data_gen.py
```python
import pandas as pd
import cv2
import numpy as np
from sklearn.utils import shuffle
import os
from collections import deque
import copy
import matplotlib
import matplotlib.pyplot as plt
from keras.utils import np_utils
from config import Config
class ActionDataGenerator(object):
def __init__(self,root_data_path,temporal_stride=1,temporal_length=16,resize=224):
self.root_data_path = root_data_path
self.temporal_length = temporal_length
self.temporal_stride = temporal_stride
self.resize=resize
def file_generator(self,data_path,data_files):
'''
data_files - list of csv files to be read.
'''
for f in data_files:
tmp_df = pd.read_csv(os.path.join(data_path,f))
label_list = list(tmp_df['Label'])
total_images = len(label_list)
if total_images>=self.temporal_length:
num_samples = int((total_images-self.temporal_length)/self.temporal_stride)+1
print ('num of samples from vid seq-{}: {}'.format(f,num_samples))
img_list = list(tmp_df['FileName'])
else:
print ('num of frames is less than temporal length; hence discarding this file-{}'.format(f))
continue
start_frame = 0
samples = deque()
samp_count=0
for img in img_list:
samples.append(img)
if len(samples)==self.temporal_length:
samples_c=copy.deepcopy(samples)
samp_count+=1
for t in range(self.temporal_stride):
samples.popleft()
yield samples_c,label_list[0]
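# Window arithmetic used above: with temporal_length L and temporal_stride S,
# a clip of T frames yields int((T - L) / S) + 1 samples. For example T=20,
# L=16, S=1 gives 5 windows: frames [0:16], [1:17], ..., [4:20].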
def load_samples(self,data_cat='train'):
data_path = os.path.join(self.root_data_path,data_cat)
csv_data_files = os.listdir(data_path)
file_gen = self.file_generator(data_path,csv_data_files)
iterator = True
data_list = []
while iterator:
try:
x,y = next(file_gen)
x=list(x)
data_list.append([x,y])
except Exception as e:
print ('the exception: ',e)
iterator = False
print ('end of data generator')
return data_list
def shuffle_data(self,samples):
data = shuffle(samples,random_state=2)
return data
def preprocess_image(self,img):
img = cv2.resize(img,(self.resize,self.resize))
img = img/255
return img
def data_generator(self,data,batch_size=10,shuffle=True):
"""
Yields the next training batch.
data is an array [[img1_filename,img2_filename...,img16_filename],label1], [image2_filename,label2],...].
"""
num_samples = len(data)
if shuffle:
data = self.shuffle_data(data)
while True:
for offset in range(0, num_samples, batch_size):
#print ('startring index: ', offset)
# Get the samples you'll use in this batch
batch_samples = data[offset:offset+batch_size]
# Initialise X_train and y_train arrays for this batch
X_train = []
y_train = []
# For each example
for batch_sample in batch_samples:
# Load image (X)
x = batch_sample[0]
y = batch_sample[1]
temp_data_list = []
for img in x:
try:
img = cv2.imread(img)
#apply any kind of preprocessing here
#img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
img = self.preprocess_image(img)
temp_data_list.append(img)
except Exception as e:
print (e)
print ('error reading file: ',img)
# Read label (y)
#label = label_names[y]
# Add example to arrays
X_train.append(temp_data_list)
y_train.append(y)
# Make sure they're numpy arrays (as opposed to lists)
X_train = np.array(X_train)
#X_train = np.rollaxis(X_train,1,4)
y_train = np.array(y_train)
y_train = np_utils.to_categorical(y_train, 3)
# The generator-y part: yield the next training batch
yield X_train, y_train
if __name__=='__main__':
root_data_path='data_files'
data_gen_obj=ActionDataGenerator(root_data_path,temporal_stride=1,temporal_length=16)
train_data = data_gen_obj.load_samples(data_cat='train')
print('num of train_samples: {}'.format(len(train_data)))
train_data[0]
test_data = data_gen_obj.load_samples(data_cat='test')
print('num of test_samples: {}'.format(len(test_data)))
train_generator = data_gen_obj.data_generator(train_data,batch_size=6,shuffle=True)
x,y = next(train_generator)
print ('x shape: ',x.shape)
print ('y shape: ',y.shape)
# analyse the first sample
x_0=x[0]
y_0=y[0]
print('x_0 shape: ',x_0.shape)
print('y_0 shape: ',y_0.shape)
print(Config.labels_to_class)
activity = Config.labels_to_class[np.argmax(y_0)]
print(activity)
# plot the first sample
num_of_images=16
fig=plt.figure(figsize=(8,8))
plt.title("one sample with {} frames ; activity:{}".format(num_of_images,activity))
subplot_num = int(np.ceil(np.sqrt(num_of_images)))
for i in range(int(num_of_images)):
ax = fig.add_subplot(subplot_num, subplot_num, i+1)
#ax.imshow(output_image[0,:,:,i],interpolation='nearest' ) #to see the first filter
ax.imshow(x_0[i,:,:,::-1])
plt.xticks([])
plt.yticks([])
plt.tight_layout()
plt.show()
```
#### File: custom_data_generator/flowers_recognition/custom_dataloader.py
```python
import os
import numpy as np
import pandas as pd
from PIL import Image
import cv2
from sklearn.utils import shuffle
from keras.utils import np_utils
import matplotlib.pyplot as plt
from config import Config
class FlowerRecognition(object):
"""
Implements a data loader that reads tha data and creates a data generator
which can be directly used to train your model
"""
def __init__(self,root_dir=None):
self.root_dir = root_dir
def load_samples(self,csv_file):
"""
function to read a csv file and create a list of samples of format
[[image1_filename,label1], [image2_filename,label2],...].
Args:
csv_file - csv file containing data information
Returns:
samples - a list of format [[image1_filename,label1], [image2_filename,label2],...]
"""
# Read the csv file
data = pd.read_csv(os.path.join(self.root_dir,'data_files',csv_file))
data = data[['FileName', 'Label', 'ClassName']]
# Get the filename contained in the first column
file_names = list(data.iloc[:,0])
# Get the labels present in the second column
labels = list(data.iloc[:,1])
samples=[]
for samp,lab in zip(file_names,labels):
samples.append([samp,lab])
return samples
def shuffle_data(self,data):
data = shuffle(data)#,random_state=2)
return data
def preprocessing(self,img,label):
img = cv2.resize(img,(Config.resize,Config.resize))
img = img/255
label = np_utils.to_categorical(label, Config.num_classes)
return img,label
def data_generator(self,data,batch_size=10,shuffle=True):
"""
Yields the next training batch.
Suppose `samples` is an array [[image1_filename,label1], [image2_filename,label2],...].
"""
num_samples = len(data)
if shuffle:
data = self.shuffle_data(data)
while True:
for offset in range(0, num_samples, batch_size):
# print ('startring index: ', offset)
# Get the samples you'll use in this batch
batch_samples = data[offset:offset+batch_size]
# Initialise X_train and y_train arrays for this batch
X_train = []
y_train = []
# For each example
for batch_sample in batch_samples:
# print (batch_sample)
# Load image (X)
# x = batch_sample[0]
img_name = batch_sample[0]
label = batch_sample[1]
img = cv2.imread(os.path.join(self.root_dir,img_name))
# print (img.shape)
# img = cv2.resize(img,(224,224))
# Preprocessing
img,label = self.preprocessing(img,label)
# print (img.shape)
X_train.append(img)
y_train.append(label)
# Make sure they're numpy arrays (as opposed to lists)
X_train = np.array(X_train)
# X_train = np.rollaxis(X_train,1,4)
y_train = np.array(y_train)
# The generator-y part: yield the next training batch
yield X_train, y_train
if __name__=='__main__':
dataloader = FlowerRecognition(root_dir=r'D:\Trainings-2019\custom_data_generator\flowers_recognition')
train_data_path = 'flowers_recognition_train.csv'
test_data_path = 'flowers_recognition_test.csv'
train_samples = dataloader.load_samples(train_data_path)
test_samples = dataloader.load_samples(test_data_path)
num_train_samples = len(train_samples)
num_test_samples = len(test_samples)
print ('number of train samples: ', num_train_samples)
print ('number of test samples: ', num_test_samples)
# Create generator
batch_size = Config.batch_size
train_datagen = dataloader.data_generator(train_samples, batch_size=batch_size)
test_datagen = dataloader.data_generator(test_samples, batch_size=batch_size)
for k in range(1):
x,y = next(train_datagen)
print ('x shape: ', x.shape)
print ('label shape: ', y.shape)
print ('the label is: ',y)
#train_samples[-15:-10]
#### we can plot the data and see by ourselves
fig = plt.figure(1,figsize=(12,12))
for i in range(8):
plt.subplot(4,4,i+1)
plt.tight_layout()
#x[i] = x[i][:,:,::-1] # converting BGR to RGB
plt.imshow(x[i][:,:,::-1], interpolation='none')
plt.title("class_label: {}".format(y[i]))
plt.xticks([])
plt.yticks([])
plt.show()
```
#### File: custom_data_generator/flowers_recognition/train.py
```python
from keras.models import Sequential
from keras.layers import Dense, Conv2D, Flatten, Activation, MaxPooling2D, Dropout
from config import Config
from custom_dataloader import FlowerRecognition
def load_model(pretrained_weights=None):
"""
the method to load and returns pretrained or new model
Args:
pretrained_weights-pretrained weights file
Returns:
model - the loaded keras model
"""
input_shape = (Config.resize,Config.resize,3)
# print (input_shape)
model = Sequential()
#filters,kernel_size,strides=(1, 1),padding='valid',data_format=None,dilation_rate=(1, 1),activation=None,use_bias=True,
#kernel_initializer='glorot_uniform',bias_initializer='zeros',kernel_regularizer=None,bias_regularizer=None,
#activity_regularizer=None,kernel_constraint=None,bias_constraint=None,
#pool_size=(2, 2), strides=None, padding='valid',data_format=None
model.add(Conv2D(32, (3,3),padding='same',input_shape=input_shape,name='conv2d_1'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2),name='maxpool2d_1'))
model.add(Conv2D(32, (3, 3),name='conv2d_2'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2),name='maxpool2d_2'))
model.add(Dropout(0.5))
#model.add(Convolution2D(64, 3, 3))
#model.add(Activation('relu'))
#model.add(Convolution2D(64, 3, 3))
#model.add(Activation('relu'))
#model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(Config.num_classes))
model.add(Activation('softmax'))
if pretrained_weights:
model.load_weights(pretrained_weights)
return model
if __name__=='__main__':
#call the dataloader and create train and test dataloader objects
dataloader = FlowerRecognition(root_dir=r'D:\Trainings-2019\custom_data_generator\flowers_recognition')
train_data_path = 'flowers_recognition_train.csv'
test_data_path = 'flowers_recognition_test.csv'
train_samples = dataloader.load_samples(train_data_path)
test_samples = dataloader.load_samples(test_data_path)
num_train_samples = len(train_samples)
num_test_samples = len(test_samples)
print ('number of train samples: ', num_train_samples)
print ('number of test samples: ', num_test_samples)
# Create generator
batch_size = Config.batch_size
train_generator = dataloader.data_generator(train_samples, batch_size=batch_size)
validation_generator = dataloader.data_generator(test_samples, batch_size=batch_size)
model = load_model()
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
hist=model.fit_generator(
train_generator,
steps_per_epoch=num_train_samples // batch_size,
epochs=Config.num_epochs,
validation_data=validation_generator,
validation_steps=num_test_samples // batch_size)
model.save_weights('first_try.h5')
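# To resume from this snapshot, feed the file back into load_model defined
# above (a sketch; 'first_try.h5' is the file saved on the previous line):
# model = load_model(pretrained_weights='first_try.h5')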
``` |
{
"source": "JonahZeng/libjpeg_readme",
"score": 3
} |
#### File: example/jpeg_example/decompress_jpeg.py
```python
from ctypes import *
import numpy as np
class decoded_yuv_t(Structure):
_fields_ = [('image_width', c_int), ('image_height', c_int), ('buffer_y_width', c_int), ('buffer_y_height', c_int), \
('buffer_u_width', c_int), ('buffer_u_height', c_int), ('buffer_v_width', c_int), ('buffer_v_height', c_int), ('buffer_ptr', c_void_p)]
class decoded_rgb_t(Structure):
_fields_ = [('image_width', c_int), ('image_height', c_int), ('buffer_ptr', c_void_p)]
class jpeg_reader(object):
def __init__(self, jpeg_file:str):
super().__init__()
self.file_name = jpeg_file.encode('utf-8')  # c_char_p expects bytes, not str
self.reader = CDLL('./jpeg_example.dll')
self.reader.read_rgb_from_JPEG_file.restype = c_int
self.reader.read_rgb_from_JPEG_file.argtypes = (c_char_p, POINTER(decoded_rgb_t))
self.reader.read_raw_data_from_JPEG_file.restype = c_int
self.reader.read_raw_data_from_JPEG_file.argtypes = (c_char_p, POINTER(decoded_yuv_t))
def get_yuv_data(self):
out_yuv = decoded_yuv_t()
ret = self.reader.read_raw_data_from_JPEG_file(self.file_name, byref(out_yuv))
if ret < 0:
print('call read_raw_data_from_JPEG_file fail')
return None
else:
print('call read_raw_data_from_JPEG_file success')
y_width, y_height = out_yuv.buffer_y_width, out_yuv.buffer_y_height
u_width, u_height = out_yuv.buffer_u_width, out_yuv.buffer_u_height
v_width, v_height = out_yuv.buffer_v_width, out_yuv.buffer_v_height
buf_from_mem = pythonapi.PyMemoryView_FromMemory
buf_from_mem.restype = py_object
buffer = buf_from_mem(c_void_p(out_yuv.buffer_ptr), y_width*y_height+u_width*u_height+v_width*v_height)
yuv = np.frombuffer(buffer, np.uint8)
return yuv
def get_rgb_data(self):
out_rgb = decoded_rgb_t()
ret = self.reader.read_rgb_from_JPEG_file(self.file_name, byref(out_rgb))
if ret < 0:
print('call read_rgb_from_JPEG_file fail')
return None
else:
print('call read_rgb_from_JPEG_file success')
width, height = out_rgb.image_width, out_rgb.image_height
print(width, height)
buf_from_mem = pythonapi.PyMemoryView_FromMemory
buf_from_mem.restype = py_object
buffer = buf_from_mem(c_void_p(out_rgb.buffer_ptr), width*height*3)
rgb = np.frombuffer(buffer, np.uint8)
return rgb
def releaseBuffer(self):
self.reader.release_buffer()
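# Usage sketch ('test.JPG' is an assumed input; the real plane sizes come
# back in the decoded_yuv_t struct). np.frombuffer views C-owned memory, so
# copy the arrays before calling releaseBuffer():
#   reader = jpeg_reader('test.JPG')
#   rgb = reader.get_rgb_data().copy()   # flat uint8, height*width*3
#   yuv = reader.get_yuv_data().copy()   # flat Y plane, then U, then V
#   reader.releaseBuffer()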
``` |
{
"source": "jonajames/mc2mt",
"score": 3
} |
#### File: mc2mt/mc2mtlib/block_conversion.py
```python
import os,json
from .block_functions import *
report_known_blocks = False
report_unknown_blocks = True
unknown_as_air = False
converted_blocks = {}
mods_available = {}
mods_priority = []
mods_enabled = {}
def str_mod(name):
author = mods_available[name]['author']
download = mods_available[name]['download']
description = mods_available[name]['description']
return f"[ {name} ] by {author}\n\t{description}\n\t{download}"
def load_mod(mod_file):
if mod_file[-5:] != '.json': return
mod = {
'name': 'unknown',
'author': 'anonymous',
'description': 'No description provided.',
'download': 'No download provided.',
'enabled': True,
'priority': 0,
'table': {},
}
with open(mod_file) as json_file:
try:
load = json.load(json_file)
except json.decoder.JSONDecodeError as e:
print("Error in mod:",mod_file)
print(e)
exit(1)
mod.update(load)
mods_available[mod['name']] = mod
mods_enabled[mod['name']] = mod['enabled']
mods_priority.append((mod['priority'],mod['name']))
mods_priority.sort()
def load_mods_from_path():
mod_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),"mods")
for mod_filename in os.listdir(mod_path):
load_mod(os.path.join(mod_path,mod_filename))
def find_in_table(table,key):
if key in table: return key
parts = key.split("_")
for i in range(1,len(parts)):
key_part = ("_".join(parts[:i]))+"?"
if key_part in table: return key_part
for i in range(1,len(parts)):
key_part = "?"+("_".join(parts[i:]))
if key_part in table: return key_part
def get_from_table(table,block):
key = find_in_table(table,block.id)
if not key: return
param0,param1,param2 = table[key]
try:
if type(param0)==str and param0[0]=="@":
param0 = (globals()[param0[1:]])(block)
if type(param1)==str and param1[0]=="@":
param1 = (globals()[param1[1:]])(block)
if type(param2)==str and param2[0]=="@":
param2 = (globals()[param2[1:]])(block)
except Exception as e:
print_block("ERROR",block)
raise e
return param0,param1,param2
def convert_block(block):
# Get conversion from cache
if block.id == "air": return ("air",15,0)
if str_block(block) in converted_blocks:
return converted_blocks[str_block(block)]
# Get conversion from mod
for priority,mod_name in mods_priority:
if not mods_enabled[mod_name]: continue
mod_table = mods_available[mod_name]['table']
converted = get_from_table(mod_table,block)
if converted:
converted_blocks[str_block(block)] = converted
if report_known_blocks: print_block("ConvertedBlock",block)
return converted
# Unknown block
if unknown_as_air: converted = ("air",15,0)
else: converted = (f"mc2mt:{block.id}",0,0)
converted_blocks[str_block(block)] = converted
if report_unknown_blocks: print_block("UnknownBlock",block)
return converted
def print_block(prefix,block):
print(prefix,str_block(block),sep="~")
def str_block(block):
string = str(block.id) + "~{"
if block.properties == {}:
return string + " }"
for p in sorted(block.properties.keys()):
string += "'" + str(p) + "':'" + str(block.properties[p]) + "', "
return string[:-2] + "}"
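# Shape of a conversion mod file, as consumed by load_mod above (the mapping
# values here are illustrative, not a real table):
# {
#     "name": "example",
#     "priority": 10,
#     "table": {
#         "stone": ["default:stone", 15, 0],
#         "oak?": ["default:wood", 15, 0],
#         "?log": ["default:tree", 15, "@some_block_function"]
#     }
# }
# A trailing or leading "?" matches a prefix/suffix of the "_"-split block id
# (see find_in_table), and "@name" values are resolved against
# block_functions at convert time (see get_from_table).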
``` |
{
"source": "Jonak-Adipta-Kalita/JAK-Discord-Bot",
"score": 2
} |
#### File: src/cogs/help.py
```python
import disnake
import src.core.embeds as embeds
import src.core.functions as funcs
from disnake.ext import commands
prefix = funcs.get_prefix()
class Dropdown(disnake.ui.Select):
def __init__(self, ctx: commands.Context, bot: commands.Bot):
self.ctx = ctx
self.bot = bot
super().__init__(
placeholder="Choose a category!!",
min_values=1,
max_values=1,
options=[
disnake.SelectOption(
label="Moderation Help",
value="moderation_help_embed",
),
disnake.SelectOption(label="Games Help", value="games_help_embed"),
disnake.SelectOption(label="Music Help", value="music_help_embed"),
disnake.SelectOption(label="Fun Help", value="fun_help_embed"),
disnake.SelectOption(label="Misc Help", value="misc_help_embed"),
],
)
async def callback(self, interaction: disnake.MessageInteraction):
label = interaction.values[0]
if label == "moderation_help_embed":
await interaction.response.edit_message(
embed=embeds.moderation_help_embed(
ctx=self.ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
elif label == "games_help_embed":
await interaction.response.edit_message(
embed=embeds.games_help_embed(
ctx=self.ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
elif label == "music_help_embed":
await interaction.response.edit_message(
embed=embeds.music_help_embed(
ctx=self.ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
elif label == "fun_help_embed":
await interaction.response.edit_message(
embed=embeds.fun_help_embed(
ctx=self.ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
elif label == "misc_help_embed":
await interaction.response.edit_message(
embed=embeds.misc_help_embed(
ctx=self.ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
class DropdownView(disnake.ui.View):
def __init__(self, ctx: commands.Context, bot: commands.Bot):
super().__init__(timeout=None)
self.add_item(Dropdown(ctx, bot))
class Help(commands.Cog):
def __init__(self, bot: commands.Bot):
self.bot = bot
@commands.group(invoke_without_command=True, description="Show the Help Menu")
async def help(self, ctx: commands.Context, command: str = None):
if command:
cmd = self.bot.get_command(command)
if cmd:
await ctx.reply(
embed=embeds.commands_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
command=cmd,
)
)
else:
await ctx.reply("Command not found!!")
else:
await ctx.reply(
embed=embeds.help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
),
view=DropdownView(ctx=ctx, bot=self.bot),
)
@help.command(description="Show the Moderation Commands", aliases=["mod"])
async def moderation(self, ctx: commands.Context):
await ctx.reply(
embed=embeds.moderation_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
@help.command(description="Show the Game Commands")
async def games(self, ctx: commands.Context):
await ctx.reply(
embed=embeds.games_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
@help.command(description="Show the Music Commands")
async def music(self, ctx: commands.Context):
await ctx.reply(
embed=embeds.music_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
@help.command(description="Show the Fun Commands")
async def fun(self, ctx: commands.Context):
await ctx.reply(
embed=embeds.fun_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
@help.command(description="Show the Misc Commands")
async def misc(self, ctx: commands.Context):
await ctx.reply(
embed=embeds.misc_help_embed(
ctx=ctx,
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
)
)
def setup(bot: commands.Bot):
bot.add_cog(Help(bot))
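# Loaded like any other disnake extension (the dotted path is an assumption
# about this repo's layout):
# bot.load_extension("src.cogs.help")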
```
#### File: cogs/slash/misc.py
```python
import disnake
import src.core.emojis as emojis
import src.core.embeds as embeds
from disnake.ext import commands
class Misc_(commands.Cog):
def __init__(self, bot: commands.Bot):
self.bot = bot
self.embed_blank_value: str = "\u200b"
@commands.slash_command(
description="Create a Poll",
options=[
disnake.Option(
name="question",
description="The Question!!",
type=disnake.OptionType.string,
required=True,
),
disnake.Option(
name="option1",
description="The First Option!!",
type=disnake.OptionType.string,
required=True,
),
disnake.Option(
name="option2",
description="The Second Option!!",
type=disnake.OptionType.string,
required=True,
),
disnake.Option(
name="option3",
description="The Third Option!!",
type=disnake.OptionType.string,
required=False,
),
],
)
async def poll(
self,
inter: disnake.ApplicationCommandInteraction,
question: str,
option1: str,
option2: str,
option3: str = None,
):
await inter.response.send_message(
embed=embeds.poll_embed(
question=question,
option1=option1,
option2=option2,
option3=option3,
)
)
msg = await inter.original_message()
await msg.add_reaction(emojis.alphabets["regional_indicator_a"])
await msg.add_reaction(emojis.alphabets["regional_indicator_b"])
if option3:
await msg.add_reaction(emojis.alphabets["regional_indicator_c"])
@commands.slash_command(description="Show the Rules")
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def show_rules(self, inter: disnake.ApplicationCommandInteraction):
rules = [
(f"{emojis.numbers['one']} No Negativity", self.embed_blank_value),
(f"{emojis.numbers['two']} No Spamming", self.embed_blank_value),
(f"{emojis.numbers['three']} No Swearing", self.embed_blank_value),
(
f"{emojis.numbers['four']} No Discriminatory Or Hate Speech",
self.embed_blank_value,
),
(f"{emojis.numbers['five']} No NSFW Content", self.embed_blank_value),
(
f"{emojis.numbers['six']} No Potentially Harmful Content",
self.embed_blank_value,
),
]
await inter.response.send_message(
embed=embeds.rules_embed(
bot_name=self.bot.user.name,
bot_avatar_url=self.bot.user.avatar.url,
rules=rules,
)
)
@commands.slash_command(description="Show the Latency")
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def latency(self, inter: disnake.ApplicationCommandInteraction):
await inter.response.send_message(f"Ping: {round(self.bot.latency * 1000)}")
@commands.slash_command(
description="Show the Details of a Member",
options=[
disnake.Option(
name="member",
description="The Member!!",
type=disnake.OptionType.mentionable,
required=False,
)
],
)
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def member_details(
self,
inter: disnake.ApplicationCommandInteraction,
member: disnake.Member = None,
):
if not member:
member = inter.author
fetched_member = await self.bot.fetch_user(inter.author.id)
else:
fetched_member = await self.bot.fetch_user(member.id)
await inter.response.send_message(
embed=embeds.member_details_embed(
member=member, fetched_member=fetched_member
)
)
@commands.slash_command(description="Show the Server Information")
@commands.cooldown(rate=1, per=10, type=commands.BucketType.user)
async def server_stats(self, inter: disnake.ApplicationCommandInteraction):
await inter.response.send_message(
embed=embeds.server_stats_embed(guild=inter.guild)
)
@commands.slash_command(
description="Shows the Source of a Message",
options=[
disnake.Option(
name="message_id",
description="Message ID of the Message to show source of!!",
type=disnake.OptionType.string,
required=True,
)
],
)
@commands.cooldown(rate=1, per=10, type=commands.BucketType.user)
async def message_source(
self,
inter: disnake.ApplicationCommandInteraction,
message_id: int,
):
msg = await inter.channel.fetch_message(message_id)
if not msg or not msg.content.strip():
await inter.response.send_message("Please provide a non-empty Message!!")
return
await inter.response.send_message(embed=embeds.message_source_embed(msg=msg))
@commands.slash_command(description="Displays the total number of Commands")
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def total_commands(self, inter: disnake.ApplicationCommandInteraction):
available_commands = [
command for command in self.bot.commands if not command.hidden
]
hidden_commands = [command for command in self.bot.commands if command.hidden]
await inter.response.send_message(
f"Available Commands: {len(available_commands)}\nHidden Commands: {len(hidden_commands)}"
)
@commands.slash_command(description="Display the Servers the Bot is in")
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def servers_in(self, inter: disnake.ApplicationCommandInteraction):
await inter.response.send_message(
embed=embeds.servers_in_embed(servers=self.bot.guilds)
)
def setup(bot: commands.Bot):
bot.add_cog(Misc_(bot))
``` |
{
"source": "Jonak-Adipta-Kalita/JAK-Python-Package",
"score": 4
} |
#### File: JAK-Python-Package/jak_python_package/mathematics.py
```python
class Mathematics:
"""
Do Mathematics with JAK Python Package
"""
def __init__(self, number: int):
"""
:param number: Number to do math with!!
:type number: Integer
"""
if number is not None:
if isinstance(number, int):
self.number = number
else:
raise Exception("Number must be a Integer!!")
else:
print("Please Provide a Integer")
number = input(">> ")
if number.lstrip("-").isdigit():
self.number = int(number)
else:
raise Exception("Number must be a Integer!!")
def __repr__(self):
return f"Number: {self.number}"
def add(self, number: int) -> int:
"""
Add two Numbers
:param number: Number to Add!!
:return: Number + new Number
Basic usage:
>>> from jak_python_package.mathematics import Mathematics
>>> number = Mathematics(5)
>>> number.add(2)
7
"""
return self.number + number
def sub(self, number: int) -> int:
"""
Subtract two Numbers
:param number: Number to Subtract!!
:return: Number - new Number
Basic usage:
>>> from jak_python_package.mathematics import Mathematics
>>> number = Mathematics(5)
>>> number.sub(2)
3
"""
return self.number - number
def mul(self, number: int) -> int:
"""
Multiply two Numbers
:param number: Number to Multiply!!
:return: Number * new Number
Basic usage:
>>> from jak_python_package.mathematics import Mathematics
>>> number = Mathematics(5)
>>> number.mul(2)
10
"""
return self.number * number
def div(self, number: int) -> float:
"""
Divide two Numbers
:param number: Number to Divide
:return: Number / new Number
Basic usage:
>>> from jak_python_package.mathematics import Mathematics
>>> number = Mathematics(5)
>>> number.div(2)
2.5
"""
return self.number / number
``` |
{
"source": "Jonak-Adipta-Kalita/JAK-Website",
"score": 2
} |
#### File: JAK-Website/your_profile/models.py
```python
from django.db import models
class Notification(models.Model):
notification_id = models.AutoField(primary_key=True)
notification_name = models.CharField(max_length=50, default="")
notification_text = models.CharField(max_length=300, default="")
def __str__(self):
return self.notification_name
```
#### File: JAK-Website/your_profile/views.py
```python
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib import messages
from .models import Notification
import json, requests
import credentials
def handle_change_password(request):
if request.user.is_authenticated and request.method == "POST":
current_pass = request.POST["changepass_currentpass"]
new_pass = request.POST["changepass_newpass"]
new_pass_confirm = request.POST["changepass_newpassconfirm"]
client_key = request.POST["g-recaptcha-response"]
secret_key = credentials.RECAPTCHA_SECRET_KEY
captchaData = {"secret": secret_key, "response": client_key}
r = requests.post(
"https://www.google.com/recaptcha/api/siteverify", data=captchaData
)
response = json.loads(r.text)
verify = response["success"]
if not request.user.check_password(current_pass):
messages.error(request, "Invalid Current Password!!")
return redirect("home")
if new_pass != new_pass_confirm:
messages.error(request, "Passwords do not match!!")
return redirect("home")
if not verify:
messages.error(request, "Invalid Recaptcha!!")
return redirect("home")
else:
user = User.objects.get(username=request.user.username)
user.set_password(<PASSWORD>)
user.save()
messages.success(request, "Password Changed Successfully!!")
return redirect("home")
return redirect("home")
else:
return render(request, "404Error.html")
def your_account(request):
if request.user.is_authenticated:
context = {"user": request.user}
return render(request, "your_profile/your_account.html", context)
else:
return render(request, "404Error.html")
def handle_signup(request):
if request.method == "POST":
username = request.POST["signUpUsername"]
fname = request.POST["fname"]
lname = request.POST["lname"]
email = request.POST["email"]
pass1 = request.POST["<PASSWORD>"]
pass2 = request.POST["signUp<PASSWORD>2"]
client_key = request.POST["g-recaptcha-response"]
secret_key = credentials.RECAPTCHA_SECRET_KEY
captchaData = {"secret": secret_key, "response": client_key}
r = requests.post(
"https://www.google.com/recaptcha/api/siteverify", data=captchaData
)
response = json.loads(r.text)
verify = response["success"]
if len(username) > 10:
messages.error(request, "Username must be under 10 Characters!!")
return redirect("home")
if not username.isalnum():
messages.error(
request, "Username should only contain Alpha-Numeric Characters!!"
)
return redirect("home")
if pass1 != pass2:
messages.error(request, "Passwords do not match!!")
return redirect("home")
if verify:
myUser = User.objects.create_user(username, email, pass1)
myUser.first_name = fname
myUser.last_name = lname
myUser.save()
messages.success(request, "Successfully Created User!!")
else:
messages.error(request, "Invalid Recaptcha/Credentials!!")
return redirect("home")
else:
return render(request, "404Error.html")
def handle_login(request):
if request.method == "POST":
loginUsername = request.POST["loginUsername"]
loginPassword = request.POST["loginPass"]
client_key = request.POST["g-recaptcha-response"]
secret_key = credentials.RECAPTCHA_SECRET_KEY
captchaData = {"secret": secret_key, "response": client_key}
r = requests.post(
"https://www.google.com/recaptcha/api/siteverify", data=captchaData
)
response = json.loads(r.text)
verify = response["success"]
user = authenticate(username=loginUsername, password=loginPassword)
if verify and user is not None:
login(request, user)
messages.success(request, "Successfully Logged In!!")
return redirect("home")
else:
messages.error(request, "Invalid Recaptcha/Credentials, Please Try Again!!")
return redirect("home")
else:
return render(request, "404Error.html")
def handle_logout(request):
if request.user.is_authenticated:
logout(request)
messages.success(request, "Successfully Logged Out!!")
return redirect("home")
else:
return render(request, "404Error.html")
def notifications(request):
if request.user.is_authenticated:
notifications = Notification.objects.all()
context = {"notifications": notifications}
return render(request, "your_profile/notifications.html", context)
else:
return render(request, "404Error.html")
``` |
{
"source": "jonakarl/mariadb_kernel",
"score": 3
} |
#### File: mariadb_kernel/mariadb_kernel/client_config.py
```python
import os
import json
class ClientConfig:
def __init__(self, log, name="mariadb_config.json"):
self.log = log
self.config_name = name
datadir = "/tmp/mariadb_kernel/datadir"
pidfile = "/tmp/mariadb_kernel/mysqld.pid"
socketfile = "/tmp/mariadb_kernel/mysqld.sock"
if "NB_USER" in os.environ:
datadir = os.path.join("/home/", os.environ["NB_USER"], "work", ".db")
self.default_config = {
"user": "root",
"host": "localhost",
"socket": socketfile,
"port": "3306",
"password": "",
"server_datadir": datadir, # Server specific option
"server_pid": pidfile, # Server specific option
"start_server": "True",
"client_bin": "mysql",
"server_bin": "mysqld",
"db_init_bin": "mysql_install_db",
"extra_server_config": [
"--no-defaults",
"--skip_log_error",
],
"extra_db_init_config": [
"--auth-root-authentication-method=normal",
],
}
self._load_config()
def _load_config(self):
path = self._config_path()
self.log.info(f"Loading config file at {path}...")
cfg = {}
using_default = False
try:
f = open(path, "r")
cfg = json.load(f)
except (OSError, json.JSONDecodeError) as e:
if isinstance(e, OSError):
self.log.info(
f"Config file {self.config_name} at {path} " "does not exist"
)
if isinstance(e, json.JSONDecodeError):
self.log.info(
f"Config file {self.config_name} at {path} "
f"is not valid JSON: {e}"
)
using_default = True
# We should abort loading the custom config if the user passes
# an unsupported option
customk = cfg.keys()
defaultk = self.default_config.keys()
if len(customk - defaultk) > 0:
self.log.info(
f"Config file {self.config_name} at {path} "
f"contains unsupported options: {customk - defaultk}"
)
using_default = True
if using_default:
self.log.info(
f"Using default config: {json.dumps(self.default_config, indent=4)}"
)
return
self.default_config.update(cfg)
def _config_path(self):
default_dir = os.path.join(os.path.expanduser("~"), ".jupyter")
custom_dir = os.environ.get("JUPYTER_CONFIG_DIR")
if custom_dir:
default_dir = custom_dir
return os.path.join(default_dir, self.config_name)
def get_args(self):
rv = ""
keys = ["user", "host", "port", "password", "socket"]
for key in keys:
value = self.default_config[key]
rv += f"--{key}={value} "
return rv
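# With the defaults above this renders the client argument string:
# "--user=root --host=localhost --port=3306 --password= --socket=/tmp/mariadb_kernel/mysqld.sock "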
def get_server_args(self):
rv = []
rv.extend(self.default_config["extra_server_config"])
# Use same connection config for both server and client
rv.append(f"--socket={self.default_config['socket']}")
rv.append(f"--port={self.default_config['port']}")
rv.append(f"--bind-address={self.default_config['host']}")
# Server specific config
rv.append(f"--datadir={self.default_config['server_datadir']}")
rv.append(f"--pid-file={self.default_config['server_pid']}")
return rv
def get_init_args(self):
rv = []
rv.extend(self.get_server_args())
rv.extend(self.default_config["extra_db_init_config"])
return rv
def get_server_paths(self):
return [
os.path.dirname(self.default_config["socket"]),
os.path.dirname(self.default_config["server_datadir"]),
os.path.dirname(self.default_config["server_pid"]),
]
def get_server_pidfile(self):
return self.default_config["server_pid"]
def start_server(self):
return self.default_config["start_server"] == "True"
def client_bin(self):
return self.default_config["client_bin"]
def server_bin(self):
return self.default_config["server_bin"]
def db_init_bin(self):
return self.default_config["db_init_bin"]
```
#### File: mariadb_kernel/tests/test_magic_linemagic.py
```python
from unittest.mock import Mock, ANY
from pandas import DataFrame
from ..maria_magics.line_magic import LineMagic
def test_line_magic_generate_plot_detects_empty_dataframe():
mockkernel = Mock()
lm = LineMagic()
assert lm.type() == "Line"
data = {"last_select": DataFrame()}
lm.generate_plot(mockkernel, data, "testplot")
mockkernel._send_message.assert_called_once_with("stderr", ANY)
def test_line_magic_generate_plot_detects_ill_formatted_args():
mockkernel = Mock()
lm = LineMagic()
# invalid args
lm.args = "someplottype"
data = {"last_select": DataFrame([1, 1])}
lm.generate_plot(mockkernel, data, "testplot")
mockkernel._send_message.assert_called_once_with(
"stderr",
"There was an error while parsing the arguments. "
"Please check %lsmagic on how to use the magic command",
)
def test_line_magic_generate_plot_sends_error_when_plot_throws():
mockkernel = Mock()
lm = LineMagic()
# valid args
lm.args = "input=1"
data = {"last_select": DataFrame([1, 1])}
lm.generate_plot(mockkernel, data, "testplot")
# The type of plot is invalid
mockkernel._send_message.assert_called_once_with("stderr", ANY)
def test_line_magic_generate_plot_sends_error_when_index_invalid():
mockkernel = Mock()
lm = LineMagic()
lm.args = 'index="col"'
data = {"last_select": DataFrame([1, 1])}
lm.generate_plot(mockkernel, data, "pie")
mockkernel._send_message.assert_called_once_with("stderr", "Index does not exist")
def test_line_magic_generate_plot_sends_display_data():
mockkernel = Mock()
lm = LineMagic()
lm.args = ""
data = {"last_select": DataFrame([1, 1])}
lm.generate_plot(mockkernel, data, "line")
mockkernel.send_response.assert_called_once_with(ANY, "display_data", ANY)
```
#### File: mariadb_kernel/tests/test_mariadbserver.py
```python
import pytest
from subprocess import check_output
from unittest.mock import Mock
from ..client_config import ClientConfig
def test_mariadb_server_logs_error_when_serverbin_invalid(mariadb_server):
mocklog = Mock()
server_bin = "invalid_mysqld"
cfg = ClientConfig(mocklog, name="nonexistentcfg.json") # default config
# Give the kernel a wrong mysqld binary
cfg.default_config.update({"server_bin": server_bin})
mariadb_server(mocklog, cfg)
mocklog.error.assert_any_call(f"No MariaDB Server found at {server_bin};")
def test_mariadb_server_starts_stops_mysqld_correctly(mariadb_server):
mocklog = Mock()
cfg = ClientConfig(mocklog, name="nonexistentcfg.json") # default config
server = mariadb_server(mocklog, cfg)
mocklog.info.assert_any_call("Started MariaDB server successfully")
assert server.is_up() == True
# Throws CalledProcessError when return value of pidof is non-zero
check_output(["pidof", "mysqld"])
# It's fine to call this here, mariadb_server fixture won't do any harm
# when it calls server.stop() too
server.stop()
mocklog.info.assert_any_call("Stopped MariaDB server successfully")
# Throws TimeoutExpired if the server didn't die
server.server.wait(timeout=3)
assert server.is_up() == False
``` |
{
"source": "jonakoudijs/jona.io",
"score": 3
} |
#### File: _functions/_songkick/main.py
```python
import os
import json
import requests
def gigography(request):
"""
Retrieve list of past concerts from Songkick.
Args:
no arguments are required for this function.
Returns:
a list of past concerts from Songkick.
"""
# set required variables
username = os.environ['SONGKICK_USERNAME']
apikey = os.environ['SONGKICK_APIKEY']
# construct url
url = 'https://api.songkick.com/api/3.0/users/' + username + '/gigography.json?apikey=' + apikey
# connect to songkick api
response = requests.get(url)
response_dict = json.loads(response.text)
# select all events from api output
events = response_dict['resultsPage']['results']['event']
# select page information
response_perpage = response_dict['resultsPage']['perPage']
response_page = response_dict['resultsPage']['page']
response_totalentries = response_dict['resultsPage']['totalEntries']
# build new dictionary of events
concerts = []
for event in reversed(events):
if event['type'] == 'Concert':
case = {
'artist_id' : event['performance'][0]['artist']['id'],
'artist_name' : event['performance'][0]['artist']['displayName'],
'date' : event['start']['date'],
'venue' : event['venue']['displayName'],
'uri' : event['uri']
}
concerts.append(case)
# set dictionary variables
concerts = json.dumps(concerts)
# set CORS headers for the preflight request
if request.method == 'OPTIONS':
# allows GET requests from any origin with the Content-Type
# header and caches the preflight response for 3600 seconds
response_headers = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET',
'Access-Control-Allow-Headers': 'Content-Type',
'Access-Control-Max-Age': '3600'
}
return ('', 204, response_headers)
# Set CORS headers for the main request
response_headers = {
'Access-Control-Allow-Origin': '*'
}
# return list of past concerts
return (concerts, 200, response_headers)
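# Deploy sketch (flags and env vars are assumptions about this setup, not
# taken from the repo): run as an HTTP-triggered Google Cloud Function with
# the two Songkick variables set, e.g.
#   gcloud functions deploy gigography --runtime python39 --trigger-http \
#       --set-env-vars SONGKICK_USERNAME=...,SONGKICK_APIKEY=...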
``` |
{
"source": "jonalmeida/hack-the-north-crazyflie",
"score": 3
} |
#### File: jonalmeida/hack-the-north-crazyflie/controller.py
```python
import os, sys, inspect, thread, time, getopt
import Leap
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
import hover
import scan
from threading import Thread
# Configuration dictionary
config = {}
class SampleListener(Leap.Listener):
finger_names = ['Thumb', 'Index', 'Middle', 'Ring', 'Pinky']
bone_names = ['Metacarpal', 'Proximal', 'Intermediate', 'Distal']
state_names = ['STATE_INVALID', 'STATE_START', 'STATE_UPDATE', 'STATE_END']
global config
def on_init(self, controller):
print "Leap Initialized"
self._x = 0
self._y = 0
self._z = 0
self._pitch = 0
self._roll = 0
self._yaw = 0
print "Initialized"
def on_connect(self, controller):
print "Leap Connected"
# Enable gestures
controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE);
controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP);
controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP);
controller.enable_gesture(Leap.Gesture.TYPE_SWIPE);
link_uri = scan.getAvailable()
if not link_uri:
print "No Crazyflie found in the vicinity. Nothing to do.."
return
# We found a Crazyflie, so hand control over to the hover loop
my_hover = hover.Hover(link_uri, self, config)
def on_disconnect(self, controller):
# Note: not dispatched when running in a debugger.
print "Leap Disconnected"
def on_exit(self, controller):
print "Leap Exited"
def on_frame(self, controller):
# Get the most recent frame and report some basic information
frame = controller.frame()
# print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
# frame.id, frame.timestamp, len(frame.hands), len(frame.fingers), len(frame.tools), len(frame.gestures()))
i_box = frame.interaction_box
hand_of_interest = frame.hands[0]
# for hand in frame.hands:
# print " %s, id %d, position: %s" % (
# handType, hand.id, hand.palm_position)
# normal = hand.palm_normal
# direction = hand.direction
# print " pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (
# direction.pitch * Leap.RAD_TO_DEG,
# normal.roll * Leap.RAD_TO_DEG,
# direction.yaw * Leap.RAD_TO_DEG)
normalized_hand = i_box.normalize_point(hand_of_interest.fingers[0].tip_position)
# print "id %d, position:\t x - %s,\t y - %s,\t z - %s, " % (
# frame.id,
# normalized_hand.x,
# normalized_hand.y,
# normalized_hand.z)
# Get the hand's normal vector and direction
normal = hand_of_interest.palm_normal
direction = hand_of_interest.direction
# print "id %d position:\t p - %f,\t r - %f,\t y - %f, " % (
# frame.id,
# direction.pitch,
# normal.roll,
# direction.yaw)
self._pitch = direction.pitch
self._roll = normal.roll
self._yaw = direction.yaw
self._y = normalized_hand.y
# Get hands
# for hand in frame.hands:
# handType = "Left hand" if hand.is_left else "Right hand"
# print " %s, id %d, position: %s" % (
# handType, hand.id, hand.palm_position)
# # Get the hand's normal vector and direction
# normal = hand.palm_normal
# direction = hand.direction
# # Calculate the hand's pitch, roll, and yaw angles
# print " pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (
# direction.pitch * Leap.RAD_TO_DEG,
# normal.roll * Leap.RAD_TO_DEG,
# direction.yaw * Leap.RAD_TO_DEG)
# # Get arm bone
# arm = hand.arm
# print " Arm direction: %s, wrist position: %s, elbow position: %s" % (
# arm.direction,
# arm.wrist_position,
# arm.elbow_position)
# # Get fingers
# for finger in hand.fingers:
# print " %s finger, id: %d, length: %fmm, width: %fmm" % (
# self.finger_names[finger.type()],
# finger.id,
# finger.length,
# finger.width)
# # Get bones
# for b in range(0, 4):
# bone = finger.bone(b)
# print " Bone: %s, start: %s, end: %s, direction: %s" % (
# self.bone_names[bone.type],
# bone.prev_joint,
# bone.next_joint,
# bone.direction)
# # Get tools
# for tool in frame.tools:
# print " Tool id: %d, position: %s, direction: %s" % (
# tool.id, tool.tip_position, tool.direction)
# # Get gestures
# for gesture in frame.gestures():
# if gesture.type == Leap.Gesture.TYPE_CIRCLE:
# circle = CircleGesture(gesture)
# # Determine clock direction using the angle between the pointable and the circle normal
# if circle.pointable.direction.angle_to(circle.normal) <= Leap.PI/2:
# clockwiseness = "clockwise"
# else:
# clockwiseness = "counterclockwise"
# # Calculate the angle swept since the last frame
# swept_angle = 0
# if circle.state != Leap.Gesture.STATE_START:
# previous_update = CircleGesture(controller.frame(1).gesture(circle.id))
# swept_angle = (circle.progress - previous_update.progress) * 2 * Leap.PI
# print " Circle id: %d, %s, progress: %f, radius: %f, angle: %f degrees, %s" % (
# gesture.id, self.state_names[gesture.state],
# circle.progress, circle.radius, swept_angle * Leap.RAD_TO_DEG, clockwiseness)
# if gesture.type == Leap.Gesture.TYPE_SWIPE:
# swipe = SwipeGesture(gesture)
# print " Swipe id: %d, state: %s, position: %s, direction: %s, speed: %f" % (
# gesture.id, self.state_names[gesture.state],
# swipe.position, swipe.direction, swipe.speed)
# if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
# keytap = KeyTapGesture(gesture)
# print " Key Tap id: %d, %s, position: %s, direction: %s" % (
# gesture.id, self.state_names[gesture.state],
# keytap.position, keytap.direction )
# if gesture.type == Leap.Gesture.TYPE_SCREEN_TAP:
# screentap = ScreenTapGesture(gesture)
# print " Screen Tap id: %d, %s, position: %s, direction: %s" % (
# gesture.id, self.state_names[gesture.state],
# screentap.position, screentap.direction )
# if not (frame.hands.is_empty and frame.gestures().is_empty):
# print ""
def state_string(self, state):
if state == Leap.Gesture.STATE_START:
return "STATE_START"
if state == Leap.Gesture.STATE_UPDATE:
return "STATE_UPDATE"
if state == Leap.Gesture.STATE_STOP:
return "STATE_STOP"
if state == Leap.Gesture.STATE_INVALID:
return "STATE_INVALID"
def x(self):
return self._x;
def y(self):
return self._y;
def z(self):
return self._z;
def pitch(self):
return self._pitch;
def roll(self):
return self._roll;
def yaw(self):
return self._yaw;
def main(argv):
try:
opts, args = getopt.getopt(argv, "rh", ["reconnect", "help"])
except getopt.GetoptError as err:
print str(err)
sys.exit(1)
for opt, arg in opts:
if opt in ("-r", "--reconnect"):
print "Auto-reconnect enabled"
config["reconnect"] = True
elif opt in ("-h", "--help"):
print "No help menu yet, read code instead."
# Create a sample listener and controller
listener = SampleListener()
controller = Leap.Controller()
# Have the sample listener receive events from the controller
controller.add_listener(listener)
# Keep this process running until Enter is pressed
print "Press Enter to quit..."
try:
sys.stdin.readline()
except KeyboardInterrupt:
pass
finally:
# Remove the sample listener when done
controller.remove_listener(listener)
if __name__ == "__main__":
main(sys.argv[1:])
```
#### File: jonalmeida/hack-the-north-crazyflie/hover.py
```python
import time, sys
from threading import Thread
#FIXME: Has to be launched from within the example folder
sys.path.append("/home/jonathan/Programs/crazyflie/cfclient-2014.01.0/lib")
import cflib
from cflib.crazyflie import Crazyflie
from controller import SampleListener
import logging
logging.basicConfig(level=logging.ERROR)
class Hover:
def __init__(self, link_uri, control_listener, config=None):
""" Initialize and run the example with the specified link_uri """
self._config = config
self._cf = Crazyflie()
self._control_listener = control_listener
self._cf.connected.add_callback(self._connected)
self._cf.disconnected.add_callback(self._disconnected)
self._cf.connection_failed.add_callback(self._connection_failed)
self._cf.connection_lost.add_callback(self._connection_lost)
self._cf.open_link(link_uri)
print "Connecting to %s" % link_uri
def _connected(self, link_uri):
print "Connected to %s" % link_uri
Thread(target=self._hover_this_shit).start()
# self._hover_this_shit()
def _disconnected(self, link_uri):
print "disconnected from %s" % link_uri
def _connection_failed(self, link_uri, msg):
print "Connection to %s failed: %s" % (link_uri, msg)
if self._config and "reconnect" in self._config:
print "Attempting reconnect.."
if self._config["reconnect"]:
self._cf.open_link(link_uri)
else:
sys.exit(2)
def _connection_lost(self, link_uri, msg):
print "Connection to %s lost: %s" % (link_uri, msg)
if "reconnect" in self._config:
print "Attempting reconnect.."
if self._config["reconnect"]:
self._cf.open_link(link_uri)
else:
sys.exit(3)
# def _hover_this_shit(self):
# print "Hovering this shit"
# thrust_mult = 1.5
# thrust_step = 500
# thrust = 20000
# pitch = -6
# roll = -2
# yawrate = 0
# while thrust >= 20000:
# self._cf.commander.send_setpoint(roll, pitch, yawrate, thrust)
# time.sleep(0.1)
# if thrust >=47000:
# thrust_mult = -1
# thrust += thrust_step * thrust_mult
# self._cf.commander.send_setpoint(0, 0, 0, 0)
# # Make sure that the last packet leaves before the link is closed
# # since the message queue is not flushed before closing
# time.sleep(0.1)
# self._cf.close_link()
def _hover_this_shit(self):
print "Hovering this shit"
# try:
while True:
print "asdasd %s %s %s %d" % (
int(self._control_listener.roll()*10),
int(self._control_listener.pitch()*10),
int(self._control_listener.yaw()*100),
int(self._control_listener.y() * 47000))
self._cf.commander.send_setpoint(
int(self._control_listener.roll()*10),
int(self._control_listener.pitch()*10),
int(self._control_listener.yaw()*100),
int(self._control_listener.y() * 47000))
# self._cf.commander.send_setpoint(
# 0,
# 0,
# 0,
# int(self._control_listener.y() * 47000))
time.sleep(0.1)
# except (KeyboardInterrupt):
# self._cf.commander.send_setpoint(0, 0, 0, 0)
# self._cf.close_link()
# exit
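# Scaling used in the loop above: Leap pitch/roll/yaw arrive in radians and
# are multiplied by 10 (attitude) and 100 (yaw rate) before being sent, and
# the normalized hand height y in [0, 1] stretches to a thrust of at most
# 47000 (the same ceiling the commented-out ramp test used).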
``` |
{
"source": "Jonamaita/iec62056-21",
"score": 3
} |
#### File: iec62056-21/iec62056_21/messages.py
```python
import re
import typing
from iec62056_21.exceptions import Iec6205621ParseError, ValidationError
from iec62056_21 import constants, utils
ENCODING = "latin-1"
# Regex to be used for parsing data. Compiled once for reuse later.
regex_data_set = re.compile(r"^(.+)\((.*)\)")
regex_data_set_data = re.compile(r"^(.*)\*(.*)")
regex_data_just_value = re.compile(r"^\((.*)\)")
class Iec6205621Data:
"""
Base class for IEC 62056-21 messages.
"""
def to_representation(self):
raise NotImplementedError("Needs to be implemented in subclass")
def to_bytes(self):
"""
Ensures the correct encoding to bytes.
"""
return self.to_representation().encode(constants.ENCODING)
@classmethod
def from_representation(cls, string_data):
raise NotImplementedError("Needs to be implemented in subclass")
@classmethod
def from_bytes(cls, bytes_data):
"""
Ensures the correct decoding from bytes.
"""
return cls.from_representation(bytes_data.decode(constants.ENCODING))
class DataSet(Iec6205621Data):
"""
The data set is the smallest component of a response.
It consists of an address and value with optional unit. in the format of
{address}({value}*{unit})
"""
EXCLUDE_CHARS = ["(", ")", "/", "!"]
def __init__(self, value: str, address: str = None, unit: str = None):
# TODO: in programming mode, protocol mode C the value can be up to 128 chars
self.address = address
self.value = value
self.unit = unit
def to_representation(self) -> str:
if self.unit is not None and self.address is not None:
return f"{self.address}({self.value}*{self.unit})"
elif self.address is not None and self.unit is None:
return f"{self.address}({self.value})"
else:
if self.value is None:
return f"()"
else:
return f"({self.value})"
@classmethod
def from_representation(cls, data_set_string):
just_value = regex_data_just_value.search(data_set_string)
if just_value:
return cls(address=None, value=just_value.group(1), unit=None)
first_match = regex_data_set.search(data_set_string)
if not first_match:
raise Iec6205621ParseError(
f"Unable to find address and data in {data_set_string}"
)
address = first_match.group(1)
values_data = first_match.group(2)
second_match = regex_data_set_data.search(values_data)
if second_match:
return cls(
address=address, value=second_match.group(1), unit=second_match.group(2)
)
else:
return cls(address=address, value=values_data, unit=None)
def __repr__(self):
return (
f"{self.__class__.__name__}("
f"value={self.value!r}, "
f"address={self.address!r}, "
f"unit={self.unit!r}"
f")"
)
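# Round trip, derived from to_representation/from_representation above:
#   >>> DataSet(value="220.5", address="1.8.0", unit="kWh").to_representation()
#   '1.8.0(220.5*kWh)'
#   >>> DataSet.from_representation("1.8.0(220.5*kWh)").unit
#   'kWh'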
class DataLine(Iec6205621Data):
"""
A data line is a list of data sets.
"""
def __init__(self, data_sets):
self.data_sets: typing.List[DataSet] = data_sets
def to_representation(self):
sets_representation = [_set.to_representation() for _set in self.data_sets]
return "".join(sets_representation)
@classmethod
def from_representation(cls, string_data):
"""
Is a list of data sets id(value*unit)id(value*unit)
need to split after each ")"
"""
separator = ")"
data_sets = list()
_string_data = string_data
for x in range(0, string_data.count(separator)):
index = _string_data.find(separator) + 1
data_set_string = _string_data[:index]
_string_data = _string_data[index:]
data_set = DataSet.from_representation(data_set_string=data_set_string)
data_sets.append(data_set)
return cls(data_sets=data_sets)
def __repr__(self):
return f"{self.__class__.__name__}(" f"data_sets={self.data_sets!r}" f")"
class DataBlock(Iec6205621Data):
"""
    A data block is a list of DataLines, each ended with the line-end characters
    \n\r
"""
def __init__(self, data_lines):
self.data_lines = data_lines
def to_representation(self):
lines_rep = [
(line.to_representation() + constants.LINE_END) for line in self.data_lines
]
return "".join(lines_rep)
@classmethod
def from_representation(cls, string_data: str):
lines = string_data.splitlines()
data_lines = [DataLine.from_representation(line) for line in lines]
return cls(data_lines)
def __repr__(self):
return f"{self.__class__.__name__}(data_lines={self.data_lines!r})"
class ReadoutDataMessage(Iec6205621Data):
def __init__(self, data_block):
self.data_block = data_block
def to_representation(self):
data = (
f"{constants.STX}{self.data_block.to_representation()}{constants.END_CHAR}"
f"{constants.LINE_END}{constants.ETX}"
)
return utils.add_bcc(data)
@classmethod
def from_representation(cls, string_data: str):
_in_data = string_data
if not utils.bcc_valid(string_data):
raise ValueError("BCC not valid")
_in_data = _in_data[1:-5] # remove stx and !<cr><lf>ETX bcc
data_block = DataBlock.from_representation(_in_data)
return cls(data_block=data_block)
def __repr__(self):
return f"{self.__class__.__name__}(data_block={self.data_block!r})"
class CommandMessage(Iec6205621Data):
allowed_commands = ["P", "W", "R", "E", "B"]
allowed_command_types = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
def __init__(
self, command: str, command_type: str, data_set: typing.Optional[DataSet]
):
self.command = command
self.command_type = command_type
self.data_set = data_set
if command not in self.allowed_commands:
raise ValueError(f"{command} is not an allowed command")
if command_type not in self.allowed_command_types:
raise ValueError(f"{command_type} is not an allowed command type")
def to_representation(self):
header = f"{constants.SOH}{self.command}{self.command_type}"
if self.data_set:
body = f"{constants.STX}{self.data_set.to_representation()}{constants.ETX}"
else:
body = f"{constants.ETX}"
message = f"{header}{body}"
return utils.add_bcc(message)
@classmethod
def from_representation(cls, string_data):
if not utils.bcc_valid(string_data):
raise ValueError("BCC not valid")
        _message = string_data[:-1]  # remove bcc
        header = _message[:3]
        body = _message[3:]
        command = header[1]
        command_type = header[2]
        if body == constants.ETX:
            data_set = None  # no data set: to_representation() emits a bare ETX body
        else:
            data_set = DataSet.from_representation(body[1:-1])
        return cls(command, command_type, data_set)
@classmethod
def for_single_read(cls, address, additional_data=None):
if additional_data:
_add_data = additional_data
else:
_add_data = ""
data_set = DataSet(value=_add_data, address=address)
return cls(command="R", command_type="1", data_set=data_set)
@classmethod
def for_single_write(cls, address, value):
data_set = DataSet(value=value, address=address)
return cls(command="W", command_type="1", data_set=data_set)
def __repr__(self):
return (
f"{self.__class__.__name__}("
f"command={self.command!r}, "
f"command_type={self.command_type!r}, "
f"data_set={self.data_set!r}"
f")"
)
class AnswerDataMessage(Iec6205621Data):
def __init__(self, data_block):
self.data_block = data_block
self._data = None
@property
def data(self):
if not self._data:
self._get_all_data_sets()
return self._data
def _get_all_data_sets(self):
data_sets = list()
for line in self.data_block.data_lines:
            for data_set in line.data_sets:  # avoid shadowing the built-in 'set'
                data_sets.append(data_set)
self._data = data_sets
def to_representation(self):
        # TODO: this is not valid when reading out partial blocks.
rep = f"{constants.STX}{self.data_block.to_representation()}{constants.ETX}"
return utils.add_bcc(rep)
@classmethod
def from_representation(cls, string_data):
_in_data = string_data
if not utils.bcc_valid(string_data):
raise ValueError("BCC not valid")
_in_data = _in_data[1:-2] # remove stx -- etx bcc
data_block = DataBlock.from_representation(_in_data)
return cls(data_block=data_block)
def __repr__(self):
return f"{self.__class__.__name__}(data_block={self.data_block!r})"
class RequestMessage(Iec6205621Data):
def __init__(self, device_address=""):
self.device_address = device_address
def to_representation(self):
return (
f"{constants.START_CHAR}{constants.REQUEST_CHAR}{self.device_address}"
f"{constants.END_CHAR}{constants.LINE_END}"
)
@classmethod
def from_representation(cls, string_data):
device_address = string_data[2:-3]
return cls(device_address)
def __repr__(self):
return f"{self.__class__.__name__}(device_address={self.device_address!r})"
class AckOptionSelectMessage(Iec6205621Data):
"""
Only support protocol mode 0: Normal
"""
def __init__(self, baud_char, mode_char):
self.baud_char = baud_char
self.mode_char = mode_char
def to_representation(self):
return f"{constants.ACK}0{self.baud_char}{self.mode_char}{constants.LINE_END}"
@classmethod
def from_representation(cls, string_data):
baud_char = string_data[2]
mode_char = string_data[3]
return cls(baud_char, mode_char)
def __repr__(self):
return (
f"{self.__class__.__name__}("
f"baud_char={self.baud_char!r}, "
f"mode_char={self.mode_char!r}"
f")"
)
class IdentificationMessage(Iec6205621Data):
def __init__(
self, identification: str, manufacturer: str, switchover_baudrate_char: str
):
self.identification: str = identification
self.manufacturer: str = manufacturer
self.switchover_baudrate_char: str = switchover_baudrate_char
def to_representation(self):
return (
f"{constants.START_CHAR}{self.manufacturer}{self.switchover_baudrate_char}\\"
f"{self.identification}{constants.LINE_END}"
)
@classmethod
def from_representation(cls, string_data):
manufacturer = string_data[1:4]
switchover_baudrate_char = string_data[4]
identification = string_data[6:-2]
return cls(identification, manufacturer, switchover_baudrate_char)
def __repr__(self):
return (
f"{self.__class__.__name__}("
f"identification={self.identification!r}, "
f"manufacturer={self.manufacturer!r}, "
f"switchover_baudrate_char={self.switchover_baudrate_char!r}"
f")"
)
```
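Taken together, these classes convert between the IEC 62056-21 wire format and Python objects. Below is a minimal usage sketch; the OBIS address `1.8.0` and the register value are made up for illustration:
```python
from iec62056_21.messages import DataSet, CommandMessage
# Parse a data set as it appears in a readout: address(value*unit)
ds = DataSet.from_representation("1.8.0(0042.1*kWh)")
assert (ds.address, ds.value, ds.unit) == ("1.8.0", "0042.1", "kWh")
assert ds.to_representation() == "1.8.0(0042.1*kWh)"  # round-trips
# Build a single-read command (R1) for the same address; to_representation()
# appends the block check character via utils.add_bcc().
cmd = CommandMessage.for_single_read(address="1.8.0")
frame = cmd.to_bytes()  # SOH R 1 STX 1.8.0() ETX <bcc>, encoded as latin-1
```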
#### File: iec62056-21/iec62056_21/utils.py
```python
from iec62056_21 import constants
def bcc_valid(message):
    """Check that the final character of the message is the correct BCC."""
    return message == add_bcc(message[:-1])
def add_bcc(message):
"""
Returns the message with BCC added.
    The data used for the calculation starts after STX and ends with, and
    includes, ETX.
    If there is a SOH in the message, the calculation starts from there instead.
"""
if isinstance(message, str):
_message = message.encode(constants.ENCODING)
return _add_bcc(_message).decode(constants.ENCODING)
return _add_bcc(message)
def _add_bcc(message: bytes):
start_bcc_index = 1
soh_index = message.find(constants.SOH.encode(constants.ENCODING))
if soh_index == -1:
# SOH not found
stx_index = message.find(constants.STX.encode(constants.ENCODING))
if stx_index == -1:
            raise IndexError("No SOH or STX found in message")
start_bcc_index = stx_index + 1
else:
start_bcc_index = soh_index + 1
data_for_bcc = message[start_bcc_index:]
bcc = calculate_bcc(data_for_bcc)
return message + bcc
def calculate_bcc(data):
"""
Calculate BCC.
"""
if isinstance(data, str):
_bcc = _calculate_bcc(data.encode(constants.ENCODING))
return _bcc.decode(constants.ENCODING)
return _calculate_bcc(data)
def _calculate_bcc(bytes_data: bytes):
bcc = 0
for b in bytes_data:
x = b & 0x7F
bcc ^= x
bcc &= 0x7F
return bcc.to_bytes(length=1, byteorder="big")
def ensure_bytes(data):
if isinstance(data, str):
return data.encode(constants.ENCODING)
elif isinstance(data, bytes):
return data
else:
        raise ValueError(f"data:{data!r} can't be converted to bytes")
```
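The BCC is the XOR of every character after SOH (or after STX when no SOH is present), masked to 7 bits. A short worked example using the same password-command frame as the tests below:
```python
from iec62056_21.utils import add_bcc, bcc_valid, calculate_bcc
frame = "\x01P0\x02(1234567)\x03"  # SOH P 0 STX (1234567) ETX, no BCC yet
# XOR of "P0\x02(1234567)\x03" is 0x50, i.e. the character 'P'.
assert calculate_bcc(frame[1:]) == "P"
assert add_bcc(frame) == frame + "P"
assert bcc_valid(frame + "P")
```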
#### File: iec62056-21/tests/test_bcc.py
```python
import pytest
from iec62056_21.utils import calculate_bcc, add_bcc
class TestBcc:
def test_bcc_bytes1(self):
data = bytes.fromhex("01573202433030332839313033323430393232333929031b")
correct_bcc = chr(data[-1]).encode("latin-1")
bcc = calculate_bcc(data[1:-1])
assert bcc == correct_bcc
def test_bcc_bytes_2(self):
data = b"\x01P0\x02(1234567)\x03P"
correct_bcc = chr(data[-1]).encode("latin-1")
bcc = calculate_bcc(data[1:-1])
assert bcc == correct_bcc
def test_bcc_string(self):
data = "\x01P0\x02(1234567)\x03P"
correct_bcc = data[-1]
bcc = calculate_bcc(data[1:-1])
assert bcc == correct_bcc
def test_add_bcc1(self):
data = "\x01P0\x02(1234567)\x03"
correct_data = "\x01P0\x02(1234567)\x03P"
with_bcc = add_bcc(data)
assert with_bcc == correct_data
```
#### File: iec62056-21/tests/test_client.py
```python
import pytest
from iec62056_21 import exceptions, client, transports
class TestIec6205621Client:
def test_with_no_address_when_required_raises_client_error(self):
with pytest.raises(exceptions.Iec6205621ClientError):
c = client.Iec6205621Client.with_tcp_transport(("192.168.1.1", 5000))
def test_can_create_client_with_tcp_transport(self):
c = client.Iec6205621Client.with_tcp_transport(
"192.168.1.1", device_address="00000000"
)
def test_no_address_when_required_raises_client_error(self):
trans = transports.TcpTransport(address=("192.168.1.1", 5000))
with pytest.raises(exceptions.Iec6205621ClientError):
c = client.Iec6205621Client(transport=trans)
def test_can_create_client_tcp_transport(self):
trans = transports.TcpTransport(address=("192.168.1.1", 5000))
c = client.Iec6205621Client(transport=trans, device_address="00000000")
``` |
{
"source": "jonancm/django-quantity-field",
"score": 2
} |
#### File: django-quantity-field/tests/test_fields.py
```python
import sys
from django.core import serializers
from django.core.exceptions import ValidationError
from django.test import TestCase
from quantity_field import ureg
from quantity_field.base import MultiQuantity
from quantity_field.fields import MultiQuantityField
from tests.models import Package
class MultiQuantityFieldTest(TestCase):
def setUp(self):
size = MultiQuantity.from_list(2, 5.5, 4, str(ureg.m))
weight = MultiQuantity.from_string('42 kilogram')
self.entry = Package.objects.create(size=size, weight=weight)
def test_field(self):
self.assertRaises(ValidationError, MultiQuantityField)
self.assertRaises(ValidationError, MultiQuantityField, dim=3.0)
self.assertRaises(ValidationError, MultiQuantityField, dim=-4)
self.assertRaises(ValidationError, MultiQuantityField, units=42)
self.assertRaises(ValidationError, MultiQuantityField, units=[1, 1, 2, 3, 5])
self.assertRaises(ValidationError, MultiQuantityField, units=(ureg.g, ureg.m,))
def test_deconstruct(self):
field = MultiQuantityField(units=(ureg.g, ureg.kg))
name, path, args, kwargs = field.deconstruct()
module, cls = path.rsplit('.', 1)
field_class = getattr(sys.modules[module], cls)
field_instance = field_class(*args, **kwargs)
self.assertIsInstance(field_instance, field.__class__)
def test_serialize(self):
data = serializers.serialize('xml', Package.objects.all())
        first = next(serializers.deserialize('xml', data)).object
self.assertEqual(first, Package.objects.first())
def test_read(self):
self.entry.refresh_from_db()
size = MultiQuantity.from_list(2, 5.5, 4, str(ureg.m))
self.assertEqual(self.entry.size, size)
weight = MultiQuantity.from_string('42 kilogram')
self.assertEqual(self.entry.weight, weight)
def test_write(self):
weight = MultiQuantity.from_string('2 kg')
self.entry.weight = weight
self.entry.save()
self.entry.refresh_from_db()
self.assertEqual(self.entry.weight, weight)
``` |
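These tests rely on a `Package` model from `tests.models` that is not included in this excerpt. A plausible sketch, inferred from how `size` (three lengths) and `weight` (one mass) are used above; the exact field arguments are assumptions:
```python
from django.db import models
from quantity_field import ureg
from quantity_field.fields import MultiQuantityField
class Package(models.Model):
    # Three linked length quantities (e.g. length x width x height).
    size = MultiQuantityField(dim=3, units=(ureg.m, ureg.km))
    # A single mass quantity.
    weight = MultiQuantityField(units=(ureg.g, ureg.kg))
```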
{
"source": "jonancm/viennagrid-python",
"score": 3
} |
#### File: examples/viennagrid_wrapper/algorithms.py
```python
from __future__ import print_function
# In this example, we will set up a domain of triangles in the cartesian 2D
# space by hand and then run the algorithm functions that viennagrid.wrapper
# provides on it (Voronoi data, refinement, centroids, volumes, etc.).
#
# For that purpose, we need to define a domain and a segmentation with two
# segments, so that segment-based algorithms like 'is_interface' can be
# demonstrated as well.
from viennagrid.wrapper import TriangularCartesian2D_Domain as Domain
from viennagrid.wrapper import TriangularCartesian2D_Segmentation as Segmentation
from viennagrid.wrapper import PointCartesian2D as Point
from viennagrid.wrapper import apply_voronoi
from viennagrid.wrapper import cell_refine
from viennagrid.wrapper import centroid
from viennagrid.wrapper import circumcenter
from viennagrid.wrapper import is_boundary
from viennagrid.wrapper import is_interface
from viennagrid.wrapper import refine
from viennagrid.wrapper import refine_uniformly
from viennagrid.wrapper import scale
from viennagrid.wrapper import spanned_volume
from viennagrid.wrapper import surface
from viennagrid.wrapper import volume
####################################
# Domain setup
####################################
domain = Domain()
p0 = Point(0, 0)
p1 = Point(1, 0)
p2 = Point(2, 0)
p3 = Point(2, 1)
p4 = Point(1, 1)
p5 = Point(0, 1)
domain.make_vertex(p0) # Vertex with ID #0
domain.make_vertex(p1) # Vertex with ID #1
domain.make_vertex(p2) # Vertex with ID #2
domain.make_vertex(p3) # Vertex with ID #3
domain.make_vertex(p4) # Vertex with ID #4
domain.make_vertex(p5) # Vertex with ID #5
segmentation = Segmentation(domain)
seg0 = segmentation.make_segment()
seg1 = segmentation.make_segment()
cell_00 = seg0.make_cell(domain.get_vertex(0), domain.get_vertex(1), domain.get_vertex(5))
cell_01 = seg0.make_cell(domain.get_vertex(1), domain.get_vertex(4), domain.get_vertex(5))
cell_10 = seg1.make_cell(domain.get_vertex(1), domain.get_vertex(2), domain.get_vertex(4))
cell_11 = seg1.make_cell(domain.get_vertex(3), domain.get_vertex(2), domain.get_vertex(4))
####################################
# apply_voronoi
####################################
apply_voronoi(domain)
####################################
# cell_refine
####################################
def predicate(cell):
return True
cell_refine(domain, segmentation, predicate)
####################################
# centroid
####################################
point = centroid(cell_00)
####################################
# circumcenter
####################################
point = circumcenter(cell_00)
####################################
# is_boundary
####################################
facet = cell_00.facets[0]
edge = cell_00.edges[0]
vertex = cell_00.vertices[0]
segment = seg0
is_boundary(domain, facet)
is_boundary(domain, edge)
is_boundary(domain, vertex)
is_boundary(segment, facet)
is_boundary(segment, edge)
is_boundary(segment, vertex)
####################################
# is_interface
####################################
segment0 = seg0
segment1 = seg1
facet = cell_00.facets[0]
edge = cell_00.edges[0]
vertex = cell_00.vertices[0]
is_interface(segment0, segment1, facet)
is_interface(segment0, segment1, edge)
is_interface(segment0, segment1, vertex)
####################################
# refine
####################################
def predicate(edge):
return True
refined_domain, refined_segmentation = refine(domain, segmentation, predicate)
####################################
# refine_uniformly
####################################
refined_domain, refined_segmentation = refine_uniformly(domain, segmentation)
####################################
# scale
####################################
scale(domain, 2.5)
####################################
# spanned_volume
####################################
vol = spanned_volume(p0, p1, p5)
vol = spanned_volume(*[vertex.to_point() for vertex in cell_00.vertices])
####################################
# surface
####################################
cell = cell_00
sur = surface(cell)
sur = surface(domain)
sur = surface(segment)
####################################
# volume
####################################
vol = volume(cell)
vol = volume(domain)
vol = volume(segment)
```
#### File: viennagrid-python/test/test_segmentations.py
```python
import sys
if len(sys.argv) > 1:
sys.path.insert(0, sys.argv.pop(1))
import unittest
import viennagrid.wrapper
##################
# LINEAR DOMAINS #
##################
class TestLinearCartesian1D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian1D(1),
viennagrid.wrapper.PointCartesian1D(2),
viennagrid.wrapper.PointCartesian1D(3),
viennagrid.wrapper.PointCartesian1D(4),
viennagrid.wrapper.PointCartesian1D(5),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearCartesian1D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearCartesian1D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestLinearCartesian2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian2D(1, 2),
viennagrid.wrapper.PointCartesian2D(2, 3),
viennagrid.wrapper.PointCartesian2D(3, 4),
viennagrid.wrapper.PointCartesian2D(4, 5),
viennagrid.wrapper.PointCartesian2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearCartesian2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearCartesian2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestLinearCartesian3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian3D(1, 2, 7),
viennagrid.wrapper.PointCartesian3D(2, 3, 7),
viennagrid.wrapper.PointCartesian3D(3, 4, 7),
viennagrid.wrapper.PointCartesian3D(4, 5, 7),
viennagrid.wrapper.PointCartesian3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearCartesian3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearCartesian3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestLinearCylindrical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCylindrical3D(1, 2, 7),
viennagrid.wrapper.PointCylindrical3D(2, 3, 7),
viennagrid.wrapper.PointCylindrical3D(3, 4, 7),
viennagrid.wrapper.PointCylindrical3D(4, 5, 7),
viennagrid.wrapper.PointCylindrical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearCylindrical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearCylindrical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestLinearPolar2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointPolar2D(1, 2),
viennagrid.wrapper.PointPolar2D(2, 3),
viennagrid.wrapper.PointPolar2D(3, 4),
viennagrid.wrapper.PointPolar2D(4, 5),
viennagrid.wrapper.PointPolar2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearPolar2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearPolar2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestLinearSpherical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointSpherical3D(1, 2, 7),
viennagrid.wrapper.PointSpherical3D(2, 3, 7),
viennagrid.wrapper.PointSpherical3D(3, 4, 7),
viennagrid.wrapper.PointSpherical3D(4, 5, 7),
viennagrid.wrapper.PointSpherical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.LinearSpherical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.LinearSpherical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
segment.make_cell(v1, v2)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
######################
# TRIANGULAR DOMAINS #
######################
class TestTriangularCartesian2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian2D(1, 2),
viennagrid.wrapper.PointCartesian2D(2, 3),
viennagrid.wrapper.PointCartesian2D(3, 4),
viennagrid.wrapper.PointCartesian2D(4, 5),
viennagrid.wrapper.PointCartesian2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TriangularCartesian2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TriangularCartesian2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTriangularCartesian3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian3D(1, 2, 7),
viennagrid.wrapper.PointCartesian3D(2, 3, 7),
viennagrid.wrapper.PointCartesian3D(3, 4, 7),
viennagrid.wrapper.PointCartesian3D(4, 5, 7),
viennagrid.wrapper.PointCartesian3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TriangularCartesian3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TriangularCartesian3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTriangularCylindrical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCylindrical3D(1, 2, 7),
viennagrid.wrapper.PointCylindrical3D(2, 3, 7),
viennagrid.wrapper.PointCylindrical3D(3, 4, 7),
viennagrid.wrapper.PointCylindrical3D(4, 5, 7),
viennagrid.wrapper.PointCylindrical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TriangularCylindrical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TriangularCylindrical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTriangularPolar2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointPolar2D(1, 2),
viennagrid.wrapper.PointPolar2D(2, 3),
viennagrid.wrapper.PointPolar2D(3, 4),
viennagrid.wrapper.PointPolar2D(4, 5),
viennagrid.wrapper.PointPolar2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TriangularPolar2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TriangularPolar2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTriangularSpherical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointSpherical3D(1, 2, 7),
viennagrid.wrapper.PointSpherical3D(2, 3, 7),
viennagrid.wrapper.PointSpherical3D(3, 4, 7),
viennagrid.wrapper.PointSpherical3D(4, 5, 7),
viennagrid.wrapper.PointSpherical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TriangularSpherical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TriangularSpherical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
#########################
# QUADRILATERAL DOMAINS #
#########################
class TestQuadrilateralCartesian2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian2D(1, 2),
viennagrid.wrapper.PointCartesian2D(2, 3),
viennagrid.wrapper.PointCartesian2D(3, 4),
viennagrid.wrapper.PointCartesian2D(4, 5),
viennagrid.wrapper.PointCartesian2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.QuadrilateralCartesian2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.QuadrilateralCartesian2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestQuadrilateralCartesian3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian3D(1, 2, 7),
viennagrid.wrapper.PointCartesian3D(2, 3, 7),
viennagrid.wrapper.PointCartesian3D(3, 4, 7),
viennagrid.wrapper.PointCartesian3D(4, 5, 7),
viennagrid.wrapper.PointCartesian3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.QuadrilateralCartesian3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.QuadrilateralCartesian3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestQuadrilateralCylindrical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCylindrical3D(1, 2, 7),
viennagrid.wrapper.PointCylindrical3D(2, 3, 7),
viennagrid.wrapper.PointCylindrical3D(3, 4, 7),
viennagrid.wrapper.PointCylindrical3D(4, 5, 7),
viennagrid.wrapper.PointCylindrical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.QuadrilateralCylindrical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.QuadrilateralCylindrical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestQuadrilateralPolar2D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointPolar2D(1, 2),
viennagrid.wrapper.PointPolar2D(2, 3),
viennagrid.wrapper.PointPolar2D(3, 4),
viennagrid.wrapper.PointPolar2D(4, 5),
viennagrid.wrapper.PointPolar2D(5, 6),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.QuadrilateralPolar2D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.QuadrilateralPolar2D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestQuadrilateralSpherical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointSpherical3D(1, 2, 7),
viennagrid.wrapper.PointSpherical3D(2, 3, 7),
viennagrid.wrapper.PointSpherical3D(3, 4, 7),
viennagrid.wrapper.PointSpherical3D(4, 5, 7),
viennagrid.wrapper.PointSpherical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.QuadrilateralSpherical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.QuadrilateralSpherical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
#######################
# TETRAHEDRAL DOMAINS #
#######################
class TestTetrahedralCartesian3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCartesian3D(1, 2, 7),
viennagrid.wrapper.PointCartesian3D(2, 3, 7),
viennagrid.wrapper.PointCartesian3D(3, 4, 7),
viennagrid.wrapper.PointCartesian3D(4, 5, 7),
viennagrid.wrapper.PointCartesian3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TetrahedralCartesian3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TetrahedralCartesian3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTetrahedralCylindrical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointCylindrical3D(1, 2, 7),
viennagrid.wrapper.PointCylindrical3D(2, 3, 7),
viennagrid.wrapper.PointCylindrical3D(3, 4, 7),
viennagrid.wrapper.PointCylindrical3D(4, 5, 7),
viennagrid.wrapper.PointCylindrical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TetrahedralCylindrical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TetrahedralCylindrical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
class TestTetrahedralSpherical3D_Segmentation(unittest.TestCase):
def setUp(self):
self.vertices = [
viennagrid.wrapper.PointSpherical3D(1, 2, 7),
viennagrid.wrapper.PointSpherical3D(2, 3, 7),
viennagrid.wrapper.PointSpherical3D(3, 4, 7),
viennagrid.wrapper.PointSpherical3D(4, 5, 7),
viennagrid.wrapper.PointSpherical3D(5, 6, 7),
]
self.num_vertices = len(self.vertices)
self.domain = viennagrid.wrapper.TetrahedralSpherical3D_Domain()
for point in self.vertices:
self.domain.make_vertex(point)
self.segmentation = viennagrid.wrapper.TetrahedralSpherical3D_Segmentation(self.domain)
self.num_segments = 5
def test_make_segment(self):
"""Test method 'make_segment' and attribute 'segments' of class 'Segmentation'."""
self.assertEqual(len(self.segmentation.segments), 0)
self.assertEqual(self.segmentation.num_segments, 0)
for i in range(0, self.num_segments):
self.segmentation.make_segment()
self.assertEqual(len(self.segmentation.segments), i+1)
self.assertEqual(self.segmentation.num_segments, i+1)
self.assertEqual(len(self.segmentation.segments), self.num_segments)
self.assertEqual(self.segmentation.num_segments, self.num_segments)
def test_make_cell(self):
"""Test method 'make_cell' and attribute 'cells' of class 'Segment'."""
for segment in self.segmentation.segments:
self.assertEqual(len(segment.cells), 0)
self.assertEqual(segment.num_cells, 0)
v1 = self.domain.get_vertex(0)
v2 = self.domain.get_vertex(1)
v3 = self.domain.get_vertex(2)
v4 = self.domain.get_vertex(3)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 1)
self.assertEqual(segment.num_cells, 1)
v1 = self.domain.get_vertex(1)
v2 = self.domain.get_vertex(2)
v3 = self.domain.get_vertex(3)
v4 = self.domain.get_vertex(4)
segment.make_cell(v1, v2, v3, v4)
self.assertEqual(len(segment.cells), 2)
self.assertEqual(segment.num_cells, 2)
if __name__ == '__main__':
unittest.main()
```
#### File: viennagrid-python/test/utils.py
```python
import math
def equal(x, y, tol=1e-9):
    """Compare two real numbers using a tolerance to avoid rounding errors."""
    return math.fabs(x - y) < tol
def point_equal(p1, p2, msg=None):
    """Compare two points coordinate-wise; raises AssertionError on mismatch.
    The (first, second, msg) signature matches what
    unittest.TestCase.addTypeEqualityFunc() expects.
    """
    if p1.dim != p2.dim:
        raise AssertionError(msg or 'points are not equal: dimensions differ')
    for i in range(p1.dim):
        if not equal(p1.coords[i], p2.coords[i]):
            raise AssertionError(
                msg or 'points are not equal: %f != %f' % (p1.coords[i], p2.coords[i]))
    return True
```
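`point_equal` has the `(first, second, msg=None)` signature that `unittest` expects from a type-equality function, so a test case can register it and then compare points with a plain `assertEqual`. A minimal sketch (the choice of `PointCartesian2D` here is illustrative):
```python
import unittest
import viennagrid.wrapper
from utils import point_equal  # the helper defined above
class PointComparisonTest(unittest.TestCase):
    def setUp(self):
        # Delegate assertEqual() on this point type to point_equal.
        self.addTypeEqualityFunc(viennagrid.wrapper.PointCartesian2D, point_equal)
    def test_points_equal_within_tolerance(self):
        p1 = viennagrid.wrapper.PointCartesian2D(1.0, 2.0)
        p2 = viennagrid.wrapper.PointCartesian2D(1.0, 2.0000000001)
        self.assertEqual(p1, p2)
if __name__ == '__main__':
    unittest.main()
```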
#### File: viennagrid-python/viennagrid/algorithms.py
```python
import viennagrid
_SUPPORTED_NORMS = ('1', '2', 'inf')
def inner_prod(point1, point2):
"""
Compute the inner product of two vectors (represented by points).
:param point1: First point
:type point1: :class:`viennagrid.Point`
:param point2: Second point
:type point2: :class:`viennagrid.Point`
:returns: float --- the result of the inner product
:raises: TypeError
"""
    try:
        point1 = point1._point
    except AttributeError:
        raise TypeError('parameter at position 1 is not a valid point')
    try:
        point2 = point2._point
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid point')
# Try to get method 'inner_prod' from 'point1'. If it doesn't have the method,
# it means it's not a cartesian point. Thus, convert to cartesian coordinates
# and get the method. If it still doesn't have the method, raise an exception.
try:
inner_prod_fn = point1.__getattribute__('inner_prod')
except AttributeError:
casted_pnt1 = point1.to_cartesian()
try:
inner_prod_fn = casted_pnt1.__getattribute__('inner_prod')
except AttributeError:
raise TypeError('point1 has no method named inner_prod')
else:
casted_pnt1 = point1
# If point types are equal, simply calculate the inner product. If they're not
# equal, try to convert 'point2' to the type of 'point1'. If types are still
# different, it means that both points are of incompatible types
# (i.e. incompatible dimensions).
if casted_pnt1.__class__ is point2.__class__:
        return inner_prod_fn(point2)
else:
casted_pnt2 = point2.to_cartesian()
if casted_pnt1.__class__ is casted_pnt2.__class__:
            return inner_prod_fn(casted_pnt2)
else:
raise TypeError('incompatible point types')
def cross_prod(point1, point2):
"""
Compute the cross product of two vectors (represented by points).
:param point1: First point
:type point1: :class:`viennagrid.Point`
:param point2: Second point
:type point2: :class:`viennagrid.Point`
:returns: :class:`viennagrid.Point` --- the result of the cross product
:raises: TypeError
"""
    try:
        point1 = point1._point
    except AttributeError:
        raise TypeError('parameter at position 1 is not a valid point')
    try:
        point2 = point2._point
    except AttributeError:
        raise TypeError('parameter at position 2 is not a valid point')
# Try to get method 'cross_prod' from 'point1'. If it doesn't have the method,
# it means it's not a cartesian point. Thus, convert to cartesian coordinates
# and get the method. If it still doesn't have the method, raise an exception.
try:
cross_prod_fn = point1.__getattribute__('cross_prod')
except AttributeError:
casted_pnt1 = point1.to_cartesian()
try:
cross_prod_fn = casted_pnt1.__getattribute__('cross_prod')
except AttributeError:
raise TypeError('point1 has no method named cross_prod')
else:
casted_pnt1 = point1
# If point types are equal, simply calculate the cross product. If they're not
# equal, try to convert 'point2' to the type of 'point1'. If types are still
# different, it means that both points are of incompatible types
# (i.e. incompatible dimensions).
if casted_pnt1.__class__ is point2.__class__:
return viennagrid.Point(cross_prod_fn(point2))
else:
casted_pnt2 = point2.to_cartesian()
if casted_pnt1.__class__ is casted_pnt2.__class__:
return viennagrid.Point(cross_prod_fn(casted_pnt2))
else:
raise TypeError('incompatible point types')
def norm(point, norm_type=2):
"""
Compute the norm of a vector (represented by a point).
:param point: Point
:type point: :class:`viennagrid.Point`
:param norm_type: Norm to calculate (at this time only 1, 2 and 'inf' are supported).
:type norm_type: int or str
:returns: float --- the norm of the vector
:raises: ValueError, TypeError
"""
    try:
        point = point._point
    except AttributeError:
        raise TypeError('parameter at position 1 is not a valid point')
norm_type = str(norm_type)
if norm_type in _SUPPORTED_NORMS:
norm_fn = viennagrid.wrapper.__getattribute__('norm_%(norm_type)s' % locals())
return norm_fn(point)
else:
        raise ValueError('unsupported norm type: %(norm_type)s' % locals())
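# Example (illustrative): for a cartesian point p with coords (3, 4),
#   norm(p)        -> 5.0  (default 2-norm, dispatches to wrapper.norm_2)
#   norm(p, 1)     -> 7.0  (wrapper.norm_1)
#   norm(p, 'inf') -> 4.0  (wrapper.norm_inf)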
def apply_voronoi(dom):
"""
Compute Voronoi information of the given domain.
:param dom: Domain
:type dom: :class:`viennagrid.Domain`
:raises: TypeError
"""
try:
dom = dom._domain
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain')
viennagrid.wrapper.apply_voronoi(dom)
def centroid(cell):
"""
Compute the centroid of the given cell.
:param cell: Cell whose centroid should be computed
:type cell: :class:`viennagrid.Cell`
:returns: :class:`viennagrid.Point` --- the centroid of the cell
:raises: TypeError
"""
try:
cell = cell._cell
except AttributeError:
raise TypeError('parameter at position 1 is not a valid cell')
point = viennagrid.wrapper.centroid(cell)
    return viennagrid.Point(*point.coords, coord_system=point.coord_system)
def cell_refine(dom, seg, predicate):
"""
Refine all cells of the given domain and segmentation which match a given predicate.
:param dom: Domain to refine
:type dom: :class:`viennagrid.Domain`
:param seg: Segmentation of the domain to refine
:type seg: :class:`viennagrid.Segmentation`
:param predicate: Function that tells whether a cell should be refined or not
:type predicate: function object that accepts a cell as parameter and returns a boolean
:returns: A two-element tuple containing the output domain and segmentation after the refinement.
:raises: TypeError
"""
try:
config = dom.config
dom = dom._domain
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain')
try:
seg = seg._segmentation
except AttributeError:
        raise TypeError('parameter at position 2 is not a valid segmentation')
refined_result = viennagrid.wrapper.cell_refine(dom, seg, predicate)
refined_domain = viennagrid.Domain(config)
refined_domain._domain = refined_result[0]
refined_segmentation = viennagrid.Segmentation(refined_domain)
refined_segmentation._segmentation = refined_result[1]
return (refined_domain, refined_segmentation)
def circumcenter(cell):
"""
Compute the circumcenter of the given cell.
:param cell: Cell whose circumcenter should be computed
:type cell: :class:`viennagrid.Cell`
:returns: :class:`viennagrid.Point` --- the circumcenter of the cell
:raises: TypeError
"""
try:
cell = cell._cell
except AttributeError:
raise TypeError('parameter at position 1 is not a valid cell')
point = viennagrid.wrapper.circumcenter(cell)
return viennagrid.Point(*point.coords, coord_system=point.coord_system)
def is_boundary(domseg, boundary_elem):
"""
Check if the given element is a boundary element of the given domain or segment.
:param domseg: Domain or segment
:type domseg: :class:`viennagrid.Domain` or :class:`viennagrid.Segment`
    :param boundary_elem: Element to check for being a boundary element of the given domain or segment. The element can be a facet, an edge or a vertex.
:type boundary_elem: :class:`viennagrid.Facet`, :class:`viennagrid.Edge` or :class:`viennagrid.Vertex`
:returns: bool --- True if the given element is a boundary element of the given domain or segment; False otherwise.
:raises: TypeError
"""
try:
if isinstance(domseg, viennagrid.Domain):
domseg = domseg._domain
elif isinstance(domseg, viennagrid.Segment):
domseg = domseg._segment
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain or segment')
try:
if isinstance(boundary_elem, viennagrid.Facet):
boundary_elem = boundary_elem._facet
elif isinstance(boundary_elem, viennagrid.Edge):
boundary_elem = boundary_elem._edge
elif isinstance(boundary_elem, viennagrid.Vertex):
boundary_elem = boundary_elem._vertex
except AttributeError:
raise TypeError('parameter at position 2 is not a valid boundary element')
return viennagrid.wrapper.is_boundary(domseg, boundary_elem)
def is_interface(seg0, seg1, interface_elem):
"""
Check if the given element is an interface element of the two given segments.
:param seg0: First segment
:type seg0: :class:`viennagrid.Segment`
:param seg1: Second segment
:type seg1: :class:`viennagrid.Segment`
    :param interface_elem: Element to check for being an interface element of the given segments. The element can be a facet, an edge or a vertex.
:type interface_elem: :class:`viennagrid.Facet`, :class:`viennagrid.Edge` or :class:`viennagrid.Vertex`
:returns: bool --- True if the given element is an interface element of the given segments; False otherwise.
:raises: TypeError
"""
try:
seg0 = seg0._segment
except AttributeError:
raise TypeError('parameter at position 1 is not a valid segment')
try:
seg1 = seg1._segment
except AttributeError:
raise TypeError('parameter at position 2 is not a valid segment')
try:
if isinstance(interface_elem, viennagrid.Facet):
interface_elem = interface_elem._facet
elif isinstance(interface_elem, viennagrid.Edge):
interface_elem = interface_elem._edge
elif isinstance(interface_elem, viennagrid.Vertex):
interface_elem = interface_elem._vertex
except AttributeError:
raise TypeError('parameter at position 3 is not a valid interface element')
return viennagrid.wrapper.is_interface(seg0, seg1, interface_elem)
def refine(dom, seg, predicate):
"""
Refine all edges of the given domain and segmentation which match a given predicate.
:param dom: Domain to refine
:type dom: :class:`viennagrid.Domain`
:param seg: Segmentation of the domain to refine
:type seg: :class:`viennagrid.Segmentation`
:param predicate: Function that tells whether an edge should be refined or not
:type predicate: function object that accepts an edge as parameter and returns a boolean
:returns: A two-element tuple containing the output domain and segmentation after the refinement.
:raises: TypeError
"""
try:
config = dom.config
dom = dom._domain
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain')
try:
seg = seg._segmentation
except AttributeError:
        raise TypeError('parameter at position 2 is not a valid segmentation')
refined_result = viennagrid.wrapper.refine(dom, seg, predicate)
refined_domain = viennagrid.Domain(config)
refined_domain._domain = refined_result[0]
refined_segmentation = viennagrid.Segmentation(refined_domain)
refined_segmentation._segmentation = refined_result[1]
return (refined_domain, refined_segmentation)
def refine_uniformly(dom, seg):
"""
Refine all edges of the given domain and segmentation.
:param dom: Domain to refine
:type dom: :class:`viennagrid.Domain`
:param seg: Segmentation of the domain to refine
:type seg: :class:`viennagrid.Segmentation`
:returns: A two-element tuple containing the output domain and segmentation after the refinement.
:raises: TypeError
"""
try:
config = dom.config
dom = dom._domain
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain')
try:
seg = seg._segmentation
except AttributeError:
        raise TypeError('parameter at position 2 is not a valid segmentation')
refined_result = viennagrid.wrapper.refine_uniformly(dom, seg)
refined_domain = viennagrid.Domain(config)
refined_domain._domain = refined_result[0]
refined_segmentation = viennagrid.Segmentation(refined_domain)
refined_segmentation._segmentation = refined_result[1]
return (refined_domain, refined_segmentation)
def surface(elem):
"""
Calculate the surface of the given element.
:param elem: Element whose surface should be calculated.
:type elem: :class:`viennagrid.Cell`, :class:`viennagrid.Domain` or :class:`viennagrid.Segment`
:returns: float --- the surface of the cell, domain or segment
:raises: TypeError
"""
try:
if isinstance(elem, viennagrid.Cell):
elem = elem._cell
elif isinstance(elem, viennagrid.Domain):
elem = elem._domain
elif isinstance(elem, viennagrid.Segment):
elem = elem._segment
except AttributeError:
raise TypeError('parameter at position 1 is not a valid element')
return viennagrid.wrapper.surface(elem)
def volume(elem):
"""
Calculate the volume of the given element.
:param elem: Element whose volume should be calculated.
:type elem: :class:`viennagrid.Cell`, :class:`viennagrid.Domain` or :class:`viennagrid.Segment`
:returns: float --- the volume of the cell, domain or segment
:raises: TypeError
"""
try:
if isinstance(elem, viennagrid.Cell):
elem = elem._cell
elif isinstance(elem, viennagrid.Domain):
elem = elem._domain
elif isinstance(elem, viennagrid.Segment):
elem = elem._segment
except AttributeError:
raise TypeError('parameter at position 1 is not a valid element')
return viennagrid.wrapper.volume(elem)
def scale(dom, factor):
"""
Scale a domain by a given factor.
:param dom: Domain to be scaled
:type dom: :class:`viennagrid.Domain`
:param factor: Scale factor
:type factor: float
:raises: TypeError
"""
try:
dom = dom._domain
except AttributeError:
raise TypeError('parameter at position 1 is not a valid domain')
viennagrid.wrapper.scale(dom, factor)
def spanned_volume(*args):
"""
Calculate the volume spanned by a set of points.
As arguments you have to pass an arbitrary number of points (:class:`viennagrid.Point` objects).
:returns: float --- the volume spanned by the set of points
:raises: TypeError
"""
point_list = []
for i, point in enumerate(args):
# Get the low-level point
try:
point = point._point
except AttributeError:
raise TypeError('parameter at position %(pos)d is not a valid point' % {'pos': i + 1})
# If point is not cartesian, convert it
if point.coord_system != 'cartesian':
point = point.to_cartesian()
# Append the point to the list
point_list.append(point)
return viennagrid.wrapper.spanned_volume(*point_list)
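# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module; the coordinates
# below are arbitrary and the snippet assumes the high-level viennagrid API
# used above is importable):
#
#   p1 = viennagrid.Point(1.0, 0.0)
#   p2 = viennagrid.Point(0.0, 1.0)
#   norm(p1)                # Euclidean (2-)norm of the vector
#   norm(p1, norm_type=1)   # 1-norm
#   spanned_volume(p1, p2)  # volume spanned by the two points
# ----------------------------------------------------------------------------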
``` |
{
"source": "jonand9010/Pandem_sim",
"score": 3
} |
#### File: PandemSimulation/networks/Networks.py
```python
import numpy as np
import networkx as nx
import json
class SIRNetwork:
def __init__(self, datafiles, travel_rate, beta, gamma, travel_infection_rate = 1):
self.graph, self.A, self.pos = self.load_graph(datafiles['data'], datafiles['position'])
self.nodes = list(self.graph.nodes())
self.node_degree = np.sum(self.A, 0)
self.number_of_nodes = self.A.shape[0]
self.travel_rate = travel_rate
self.beta = beta
self.gamma = gamma
self.R0 = self.beta/self.gamma
self.travel_infection_rate = travel_infection_rate
def load_graph(self, file_graph, file_positions):
with open(file_graph) as json_file:
import_data = json.load(json_file)
with open(file_positions) as json_file:
import_pos = json.load(json_file)
import_graph = nx.node_link_graph(import_data)
G = import_graph
pos = import_pos
n_nodes = nx.number_of_nodes(G)
A = nx.to_numpy_array(G) #Save the adjacency matrix of the network
return G, A, pos
```
#### File: PandemSimulation/simulation/NetworkSimulation.py
```python
import numpy as np
class NetworkSimulation:
def __init__(self, Network, timesteps):
self.__dict__.update(Network.__dict__)
self.node_population = np.zeros((Network.number_of_nodes, timesteps), dtype = 'int')
class SIR_NetworkSimulation(NetworkSimulation):
def __init__(self, Network, timesteps, start_parameters):
super().__init__(Network, timesteps)
self.timesteps = timesteps
self.S = self.node_population.copy()
self.I, self.R = self.S.copy(), self.S.copy()
self.SIR = np.zeros((3, self.timesteps))
self.node_population[:,0] = start_parameters['node_populations']
self.travel_matrix = self.get_travel_matrix()
self.start_num_infected = start_parameters['infected']
self.citykey = start_parameters['city']
self.get_first_infected_city()
def get_travel_matrix(self):
travel_matrix = self.A.copy()
for i in range(self.number_of_nodes):
travel_matrix[i,:] = self.travel_rate * self.node_population[i, 0] / self.node_degree[i] * self.A[i, :]
travel_matrix = np.floor(travel_matrix)
return travel_matrix
def get_first_infected_city(self):
for i in range(self.number_of_nodes):
if self.nodes[i] == self.citykey:
start_city_index = i
self.I[start_city_index, 0] = np.random.randint(self.start_num_infected) # Number of infected in start city
self.S[:,0] = self.node_population[:,0] - self.I[:,0] # Defining the number of susceptible in each city at t=0
self.SIR[:, 0] = np.sum(self.S[:,0]), np.sum(self.I[:,0]), np.sum(self.R[:,0])
def simulate(self):
for t in range(self.timesteps-1):
nodal_infection_ratio = self.I[:, t]/self.node_population[:, t]
dIdt = self.travel_infection_rate * np.dot(self.travel_matrix, nodal_infection_ratio)
dIdt = np.floor(dIdt)
dIdt = np.random.poisson(dIdt)
print(f"Timestep: {t+1} of { self.timesteps-1}")
self.I[:, t] = self.I[:, t] + dIdt
self.S[:, t] = self.S[:, t] - dIdt
dSdt = -self.beta * self.I[:, t] * self.S[:, t] / self.node_population[:, t]
dIdt = self.beta * self.I[:, t] * self.S[:, t] / self.node_population[:, t] - self.gamma * self.I[:, t]
dRdt = self.gamma*self.I[:, t]
self.S[:, t+1] = self.S[:, t] + dSdt
self.I[:, t+1] = self.I[:, t] + dIdt
self.R[:, t+1] = self.R[:, t] + dRdt
self.node_population[:, t+1] = self.S[:, t+1] + self.I[:, t+1] + self.R[:, t+1]
self.SIR[:, t+1] = np.sum(self.S[:,t+1]), np.sum(self.I[:,t+1]), np.sum(self.R[:,t+1])
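# Minimal usage sketch (illustrative only: the file names, rates and start
# parameters below are hypothetical assumptions, not part of the project):
#
#   datafiles = {'data': 'graph.json', 'position': 'positions.json'}
#   network = SIRNetwork(datafiles, travel_rate=0.005, beta=0.3, gamma=0.1)
#   start = {'node_populations': populations, 'infected': 10, 'city': 'Stockholm'}
#   sim = SIR_NetworkSimulation(network, timesteps=100, start_parameters=start)
#   sim.simulate()  # sim.SIR then holds the aggregated S, I and R time series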
``` |
{
"source": "jonand9010/Peak_fitter",
"score": 3
} |
#### File: Peak_fitter/Dashboard/PeakDash.py
```python
import dash
from dash.dependencies import Input, Output, State
import plotly.graph_objects as go
import plotly.express as px
import numpy as np
import pandas as pd
from inspect import getmembers, isclass
from layouts.app_layout import App_Layout
from gui_utils.helper_functions import read_datafile, model_selection, Table
import dash_table
from dash_table.Format import Format
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
Peak_fit_dashboard = dash.Dash(__name__, external_stylesheets = external_stylesheets)
Peak_fit_dashboard.layout = App_Layout()
Table_fitresults = Table()
@Peak_fit_dashboard.callback(Output('Raw-data-plot', 'figure'), Input('data-import', 'contents'))
def plot_raw_data(data):
datafile = read_datafile(data)
try:
df = pd.read_csv(datafile, sep = ';', names = ['x', 'y'])
fig = px.scatter(df, x = df['x'], y = df['y'])
except:
fig = go.Figure()
return fig
@Peak_fit_dashboard.callback([Output('Model-fit-plot', 'figure'), Output('Residuals-plot', 'figure'), Output('Fit-Results', 'data'),
Output('Fit-Results', 'columns'), Output('clear_button', 'n_clicks')], [Input('data-import', 'contents'),
Input('Model-selection', 'value'), Input('clear_button', 'n_clicks'), Input('Raw-data-plot', 'relayoutData')] )
def plot_model_fit(data, model_str, n_clicks, relayout_data, *figures):
datafile = read_datafile(data)
try:
df = pd.read_csv(datafile, sep = ';', names = ['x', 'y'])
model = model_selection(model_str)
model.fit(df['x'], df['y'])
Table_fitresults.update_table(model.parameters, model_str)
fig_model = px.scatter(df, x = 'x', y = 'y')
fig_model.add_scatter(x = df['x'], y = model.predict(df['x']), mode='lines')
fig_model.update_layout(showlegend=False)
df['residuals'] = df['y'] - model.predict(df['x'])
fig_residuals = px.scatter(df, x = 'x', y = df['residuals'])
except:
fig_model = go.Figure()
fig_residuals = go.Figure()
for fig in [fig_model, fig_residuals]:
try:
fig['layout']["xaxis"]["range"] = [relayout_data['xaxis.range[0]'], relayout_data['xaxis.range[1]']]
fig['layout']["xaxis"]["autorange"] = False
except (KeyError, TypeError):
fig['layout']["xaxis"]["autorange"] = True
if (n_clicks > 0) :
Table_fitresults.__init__()
n_clicks = 0
return fig_model, fig_residuals, Table_fitresults.data, Table_fitresults.columns, n_clicks
if __name__ == '__main__': Peak_fit_dashboard.run_server(debug=True)
``` |
{
"source": "jonandergomez/machine_learning_for_students",
"score": 3
} |
#### File: machine_learning_for_students/ann/Functional.py
```python
import sys
import numpy
class Functional:
"""
"""
valid_activation_types = ['tanh', 'sigmoid', 'linear', 'linear_rectified', 'softmax', 'multinomial', 'binary']
# -------------------------------------------------------------------------
def __init__(self, input_size, input_type, output_size, output_type, scale = 1.0, learning_rate = None, alpha_tanh = 1.7159, beta_tanh = 2.0/3.0):
try:
if input_type not in Functional.valid_activation_types:
raise TypeError("'%s' is not a valid input type!" % input_type)
if output_type not in Functional.valid_activation_types:
raise TypeError("'%s' is not a valid output type!" % output_type)
if input_size < 1 or input_size > 10000:
raise TypeError("%d is not a valid input size!" % input_size)
if output_size < 1 or output_size > 10000:
raise TypeError("%d is not a valid input size!" % output_size)
self.input_type = input_type
self.output_type = output_type
#
self.input_bias = numpy.zeros(input_size)
self.output_bias = numpy.zeros(output_size)
#
alpha = scale * numpy.sqrt(6.0 / (input_size + output_size))
self.weights = (2 * alpha) * numpy.random.rand(output_size, input_size) - alpha
self.mean_forward = numpy.zeros(output_size)
self.sigma_forward = numpy.ones( output_size)
self.mean_backward = numpy.zeros( input_size)
self.sigma_backward = numpy.ones( input_size)
self.learning_rate = learning_rate
self.learning_rates = None
self.alpha_tanh = alpha_tanh
self.beta_tanh = beta_tanh
except TypeError as detail:
print('SEVERE ERROR %s ' % detail)
sys.exit(-1)
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def get_input_size(self): return self.weights.shape[1]
# -------------------------------------------------------------------------
def get_output_size(self): return self.weights.shape[0]
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_forward(self, v, drop_out_mask = None, drop_out_coeff = 1.0, noise_sigma = 0.0):
net = self.compute_net_forward(v, drop_out_coeff)
if noise_sigma > 0.0 : net = net + noise_sigma * numpy.random.randn(net.shape[0], net.shape[1])
if drop_out_mask is not None: net = net * drop_out_mask
a = self.compute_activation_forward(net)
if drop_out_mask is not None: a = a * drop_out_mask
return net, a
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_backward(self, h, drop_out_mask = None, drop_out_coeff = 1.0, noise_sigma = 0.0):
net = self.compute_net_backward(h, drop_out_coeff)
if noise_sigma > 0.0 : net = net + noise_sigma * numpy.random.randn(net.shape[0], net.shape[1])
if drop_out_mask is not None: net = net * drop_out_mask
a = self.compute_activation_backward(net)
if drop_out_mask is not None: a = a * drop_out_mask
return net, a
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_net_forward(self, v, drop_out_coeff = 1.0):
preactivation = drop_out_coeff * numpy.dot(self.weights, v) + self.output_bias[:, numpy.newaxis]
#if hasattr(self,'mean_forward'):
if self.mean_forward is not None:
preactivation = (preactivation - self.mean_forward[:, numpy.newaxis]) / self.sigma_forward[:, numpy.newaxis]
return preactivation
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_net_backward(self, h, drop_out_coeff = 1.0):
preactivation = drop_out_coeff * numpy.dot(self.weights.transpose(), h) + self.input_bias[:, numpy.newaxis]
#if hasattr(self,'mean_forward'):
        if self.mean_backward is not None:
preactivation = (preactivation - self.mean_backward[:, numpy.newaxis]) / self.sigma_backward[:, numpy.newaxis]
return preactivation
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_activation(self, activation_type, net):
if activation_type == 'tanh':
a = self.alpha_tanh * numpy.tanh(self.beta_tanh * net)
elif activation_type == 'sigmoid' :
temp = numpy.maximum(-40.0, net)
temp = numpy.minimum( 40.0, temp)
a = 1.0 / (1.0 + numpy.exp(-temp))
elif activation_type == 'binary':
temp = numpy.maximum(-40.0, net)
temp = numpy.minimum( 40.0, temp)
a = 1.0 / (1.0 + numpy.exp(-temp))
if len(a.shape) > 1 :
thresholds = numpy.random.rand(a.shape[0], a.shape[1])
else:
thresholds = numpy.random.rand(len(a))
a[a >= thresholds] = 1.0
a[a < thresholds] = 0.0
elif activation_type == 'linear_rectified':
# Sharp version of the linear rectified activation function.
a = numpy.maximum(0.0, net)
# Comment the previous line and uncomment the following lines for
# using the soft version of the linear rectified activation function.
# In case of using the soft version do the proper changes in the method
# compute_activation_derivative() that appears below.
#
#temp = numpy.maximum(-40.0, net)
#temp = numpy.minimum( 40.0, temp)
#a = numpy.log( 1 + numpy.exp(temp))
elif activation_type == 'linear' :
a = net
elif activation_type == 'softmax':
den = -numpy.inf
for i in range(len(net)): den = numpy.logaddexp(den, net[i])
a = numpy.exp(net - den)
elif activation_type == 'multinomial': # incomplete
den = -numpy.inf
for i in range(len(net)): den = numpy.logaddexp(den, net[i])
a = numpy.exp(net - den)
# Here it remains to multiply 'a' by the sum of all the inputs.
else:
sys.exit(1)
return a
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_activation_forward(self, net):
return self.compute_activation(self.output_type, net)
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_activation_backward(self, net):
return self.compute_activation(self.input_type, net)
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def compute_activation_derivative(self, activation_type, a, net):
if activation_type == 'tanh':
ad = self.beta_tanh * (self.alpha_tanh -(a * a) / self.alpha_tanh)
elif activation_type == 'sigmoid' :
ad = a * (1 - a)
elif activation_type == 'binary' :
# Because 'a' comes binarized and the derivative should be computed before
temp = numpy.maximum(-40.0, net)
temp = numpy.minimum( 40.0, temp)
temp_a = 1.0 / (1.0 + numpy.exp(-temp))
ad = temp_a * (1 - temp_a)
elif activation_type == 'linear_rectified':
# Sharp version of the linear rectified activation function
ad = numpy.ones(a.shape)
ad[net <= 0.0] = 0.0
# Soft version of the linear rectified activation function
#temp = numpy.maximum(-40.0, net)
#temp = numpy.minimum( 40.0, temp)
#ad = 1.0 / (1.0 + numpy.exp(-temp))
elif activation_type in ['linear', 'softmax', 'multinomial']:
ad = numpy.ones(a.shape)
else:
sys.exit(1)
return ad
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def max_norm(self, max_norm_value = 3.5):
w_norm = numpy.linalg.norm(self.weights, axis = 1)
for i in range(w_norm.shape[0]):
if w_norm[i] > max_norm_value:
self.weights[i] = max_norm_value * self.weights[i] / w_norm[i]
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def backward_propagation(self, error_output, net_output, act_output, act_input):
# delta is a matrix with 'output_size' rows and 'batch_size' columns
# Here the elementwise product should be used
delta = error_output * self.compute_activation_derivative(self.output_type, act_output, net_output)
# error_input is a matrix with 'input_size' rows and 'batch_size' columns
#
# It is the error for the layer below.
# Each column of this matrix is a vector with the error corresponding to
# a given sample. Each component of the column vectors corresponds to
# the dot/inner/scalar product of the delta corresponding to the backpropagated
# error and the weights related with each input unit.
#
# for an input unit j, error_input[j] = sum(k = 1, K)(delta[k] * weights[k, j])
#
# So for obtaining the whole error vector
#
# error_input = dot(delta, weights[:, j])
#
# When using a mini-batch error_input and delta are not vectors, they are matrices
# with one column corresponding to a different sample, so the error for a given sample
# will be computed as follows
#
# error_input[:,b] = dot(delta[:,b], weights[:, j])
#
# that in matrix form is
#
# error_input = dot(weights', delta)
#
error_input = numpy.dot(self.weights.transpose(), delta)
# incr_weights is a matrix of 'output_size' x 'input_size'
# because it is the gradient for updating the weights.
#
# As explained before for computing the input error for the input layer
# of a block of two consecutive layers, when working with mini-batches
# a sample is a matrix where each column is a sample, then the number
# of rows is the number of features (the dimensionality). For layers
# a any level, the input dimension of the number of units (neurons) in
# the input layer and the output dimension is the number of units (neurons)
# in the output layer, so, the matrix of weights between two consecutive
# layers is a matrix with a number of rows equal to the output dimension
# and number of columns equal to the input dimension.
#
# The accumulated gradient for a given mini-batch of samples can be computed
# in matrix form as follows. Then, each component of the incr_weights matrix
# contains the accumulated gradient corresponding to the mini-batch, in such
# a way that the weights are going to be updated once per mini-batch but with
# the accumulated gradient.
#
incr_weights = numpy.dot(delta, act_input.transpose())
# incr_output_bias is a vector with 'output_size' elements
#
# Here applies the same idea, the output bias are updated with the accumulated
# gradient for all the samples in the mini-batch. In this case it is the sum
# of each dimension of the output bias, so the result is a column vector whose
# components are the sum of all the components in the same row of the delta
# matrix.
#
incr_output_bias = numpy.sum(delta, axis = 1)
if self.mean_forward is not None:
#
incr_weights /= self.sigma_forward[:, numpy.newaxis]
incr_output_bias /= self.sigma_forward
#
incr_mean_forward = - incr_output_bias / self.sigma_forward
incr_sigma_forward = + ((delta * net_output) / self.sigma_forward[:, numpy.newaxis]).sum(axis = 1)
else:
incr_mean_forward = None
incr_sigma_forward = None
return error_input, incr_weights, incr_output_bias, incr_mean_forward, incr_sigma_forward
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def forward_propagation(self, error_input, net_input, act_input, act_output):
# delta is a matrix with 'input_size' rows and 'batch_size' columns
delta = error_input * self.compute_activation_derivative(self.input_type, act_input, net_input)
# error_output is a matrix with 'output_size' rows and 'batch_size' columns, it is the error for the layer above
error_output = numpy.dot(self.weights, delta)
# incr_weights is a matrix of 'output_size' x 'input_size'
incr_weights = numpy.dot(act_output, delta.transpose())
# incr_input_bias is a matrix of 'input_size'
incr_input_bias = numpy.sum(delta, axis = 1)
if self.mean_backward is not None:
#
incr_weights /= self.sigma_backward[numpy.newaxis, :]
incr_input_bias /= self.sigma_backward
#
incr_mean_backward = - incr_input_bias / self.sigma_backward
incr_sigma_backward = + ((delta * net_input) / self.sigma_backward[:, numpy.newaxis]).sum(axis = 1)
else:
incr_mean_backward = None
incr_sigma_backward = None
return error_output, incr_weights, incr_input_bias, incr_mean_backward, incr_sigma_backward
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
def contrastive_divergence(self, vpos):
net1, hpos = self.compute_forward( vpos)
net2, vneg = self.compute_backward(hpos)
net3, hneg = self.compute_forward( vneg)
if len(vpos.shape) > 1 :
incrW = numpy.dot(hpos, vpos.transpose()) - numpy.dot(hneg, vneg.transpose())
incrObias = numpy.sum((hpos - hneg), axis = 1)
incrIbias = numpy.sum((vpos - vneg), axis = 1)
else:
incrW = numpy.outer(hpos, vpos) - numpy.outer(hneg, vneg)
incrObias = hpos - hneg
incrIbias = vpos - vneg
vbias_term = 0.0
if self.input_type in ['linear'] : # , 'linear_rectified'] :
temp = vpos - self.input_bias[:, numpy.newaxis]
vbias_term = -0.5 * (temp * temp).sum()
else:
vbias_term = numpy.dot(self.input_bias, vpos).sum()
if self.output_type in ['linear'] : # , 'linear_rectified'] :
temp = hpos - self.output_bias[:, numpy.newaxis]
hidden_term = - 0.5 * (temp * temp).sum()
#for i in range(vpos.shape[1]):
# hidden_term = hidden_term + numpy.dot( numpy.dot(hpos[:,i], self.weights), vpos[:, i])
hidden_term = hidden_term + numpy.dot(numpy.dot(hpos.transpose(), self.weights), vpos).sum()
else:
# For avoiding excess in the computation of log(1+exp(x))
net1 = numpy.minimum( 40.0, net1)
net1 = numpy.maximum(-40.0, net1)
hidden_term = numpy.log(1 + numpy.exp(net1)).sum()
free_energy = -vbias_term - hidden_term
#print( "W = %e %e %e fe = %e" % (self.weights.sum(), self.input_bias.sum(), self.output_bias.sum(), free_energy) )
return incrW, incrObias, incrIbias, free_energy
# -------------------------------------------------------------------------
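# Minimal smoke test added for illustration; the layer sizes and the random
# mini-batch below are arbitrary assumptions, not part of the original module.
if __name__ == '__main__':
    numpy.random.seed(0)
    layer = Functional(4, 'linear', 3, 'sigmoid')
    v = numpy.random.randn(4, 5)  # mini-batch of 5 samples with 4 features
    net, a = layer.compute_forward(v)
    print('forward activations shape:', a.shape)  # (3, 5)
    incr_w, incr_obias, incr_ibias, fe = layer.contrastive_divergence(v)
    print('free energy after one CD step: %e' % fe)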
```
#### File: machine_learning_for_students/data_tools/RangeToBitmap.py
```python
import numpy
#from matplotlib import pyplot
import sys
import os
import numpy
class RangeToBitmap:
def __init__(self, bounds = None, num_bits = None, h = None):
self.x_range = numpy.linspace(bounds[0], bounds[1], num_bits)
if h is None:
h = (bounds[1] - bounds[0]) / (2 * num_bits + 1)
self.h = h
try:
self.alpha = -0.5 / (h * h)
except:
print(bounds)
print(num_bits)
sys.exit(1)
def bitmap(self, value):
y = value - self.x_range
y = numpy.exp(self.alpha * y * y)
return y / y.sum()
def __len__(self):
return len(self.x_range)
if __name__ == '__main__':
from bokeh.plotting import figure, output_file, show
rtb = RangeToBitmap(bounds = [numpy.log(1.0e-5), 0.0], num_bits = 10, h = None)
output_file('/tmp/rtb.html')
p = figure(title = 'Checking', x_range = [-16,3], x_axis_label = 'x', y_axis_label = 'bitmap value', width = 900)
i = 1
values = numpy.linspace(-20.0, 2.0, 10)
for x in values:
print(x)
y = rtb.bitmap(x)
print(y)
color = "#%02x%02x%02x" % (int((i * 255) / len(values)), 150, 150)
p.line(rtb.x_range, y, legend = '%.4f' % x, line_width = 2, line_color = color)
i += 1
show(p)
```
#### File: machine_learning_for_students/data_tools/smoothing.py
```python
import numpy
#from matplotlib import pyplot
import numpy
def smoothing_by_kernel(y_from = None, n_from = None, n_to = None, duration = 10.0, h = 0.05):
"""
    y_from : is the array with the input signal to be smoothed and embedded
into a new array with 'n_to' values
    n_from : number of points from 'y_from' to consider; if it is None, all
the points in 'y_from' are used
n_to : number of points for the smoothed signal
duration : is the time in seconds the input signal corresponds to
h : radius of Gaussian kernel used as the smoothing window
returns 'y_to' containing the smoothed signal with 'n_to' points
advice : 'duration' and 'h' are closely related, please use the proper values
of these parameters according to your data
"""
#
if y_from is None : raise Exception("Impossible reshaping with no data!")
if n_from is None : n_from = len(y_from)
if n_to is None : n_to = n_from
#
# 'x_from' contains the values of x for the input signal equally spaced
x_from = numpy.linspace(0.0, duration, n_from)
# 'x_to' contains the values of x for the smoothed output signal equally spaced
x_to = numpy.linspace(0.0, duration, n_to)
# reservation of space for 'y_to'
y_to = numpy.zeros(n_to)
    # computation of the smoothed signal: each point in the input signal
    # contributes to the estimate of every point in the smoothed output
    # signal, with a weight defined by the Gaussian window of radius 'h'
for t in range(n_to):
y_filter = numpy.exp(-0.5 * ((x_from - x_to[t]) / h) ** 2)
y_to[t] = (y_filter * y_from[:n_from]).sum() / y_filter.sum()
return y_to
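if __name__ == '__main__':
    # Quick self-check (illustrative only; the noisy sine is an arbitrary
    # example signal, not project data):
    x = numpy.linspace(0.0, 10.0, 200)
    noisy = numpy.sin(x) + 0.1 * numpy.random.randn(200)
    smoothed = smoothing_by_kernel(y_from = noisy, n_to = 400, duration = 10.0, h = 0.2)
    print(len(smoothed), float(smoothed.min()), float(smoothed.max()))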
``` |
{
"source": "jonandergomez/teaa_lab",
"score": 3
} |
#### File: examples/python/see_accuracy_evolution.py
```python
import os
import sys
import numpy
from matplotlib import pyplot
if __name__ == '__main__':
task = 'classification' # 'prediction'
clustering = 'kmeans' # 'gmm'
results_dir = 'results'
for i in range(len(sys.argv)):
if sys.argv[i] == '--classification':
task = 'classification'
elif sys.argv[i] == '--prediction':
task = 'prediction'
elif sys.argv[i] == '--kmeans':
clustering = 'kmeans'
elif sys.argv[i] == '--gmm':
clustering = 'gmm'
elif sys.argv[i] == '--results-dir':
results_dir = sys.argv[i + 1]
if clustering == 'kmeans':
prefix = f'{task}-results'
elif clustering == 'gmm':
prefix = f'{clustering}-{task}-results'
else:
raise Exception(f'Invalid clustering type: {clustering}')
suffix = '.txt'
def extract_num_clusters(filename):
parts = filename.split(sep = '-')
str_n = parts[-1].split(sep = '.')[0]
return int(str_n)
def get_accuracy(filename):
accuracy = None
f = open(filename, 'rt')
for line in f:
parts = line.split()
if len(parts) == 3 and parts[0] == 'accuracy':
accuracy = float(parts[1])
f.close()
if accuracy is None:
raise Exception(f'accuracy not found in {filename}')
return accuracy
data = list()
for root, dirs, files in os.walk(results_dir):
#print(root, len(dirs), len(files))
filenames = [f for f in filter(lambda fname: fname.startswith(prefix) and fname.endswith(suffix), files)]
for filename in filenames:
data.append((extract_num_clusters(filename), get_accuracy(f'{root}/{filename}')))
data.sort(key = lambda x: x[0])
data = numpy.array(data)
filename = f'{root}/accuracy-{clustering}-{task}'
fig, axes = pyplot.subplots(nrows = 1, ncols = 1, figsize = (9, 6))
#
axis = axes
axis.set_facecolor('#eefffc')
axis.plot(data[:,0], data[:, 1], 'ro-', alpha = 1.0)
axis.grid()
axis.set_xlabel('Number of clusters')
axis.set_ylabel('Accuracy')
axis.set_title('Accuracy evolution according to the number of clusters')
pyplot.tight_layout()
for file_format in ['png', 'svg']:
pyplot.savefig(f'{filename}.{file_format}', format = file_format)
print(f'{filename}.{file_format} created')
del fig
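# Example invocation (illustrative):
#   python see_accuracy_evolution.py --classification --kmeans --results-dir results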
``` |
{
"source": "jonandergomez/use_case_pipeline",
"score": 2
} |
#### File: use_case_pipeline/python/pneumothorax_segmentation_inference.py
```python
import argparse
import os
import pyeddl.eddl as eddl
from pyeddl.tensor import Tensor
import pyecvl.ecvl as ecvl
import utils
from models import SegNetBN
def main(args):
num_classes = 1
size = [512, 512] # size of images
thresh = 0.5
if args.out_dir:
os.makedirs(args.out_dir, exist_ok=True)
in_ = eddl.Input([1, size[0], size[1]])
out = SegNetBN(in_, num_classes)
out_sigm = eddl.Sigmoid(out)
net = eddl.Model([in_], [out_sigm])
eddl.build(
net,
eddl.adam(0.0001),
["cross_entropy"],
["mean_squared_error"],
eddl.CS_GPU([1]) if args.gpu else eddl.CS_CPU()
)
eddl.summary(net)
eddl.setlogfile(net, "pneumothorax_segmentation_inference")
if not os.path.exists(args.ckpts):
raise RuntimeError('Checkpoint "{}" not found'.format(args.ckpts))
eddl.load(net, args.ckpts, "bin")
training_augs = ecvl.SequentialAugmentationContainer([
ecvl.AugResizeDim(size),
])
test_augs = ecvl.SequentialAugmentationContainer([
ecvl.AugResizeDim(size),
])
dataset_augs = ecvl.DatasetAugmentations([training_augs, None, test_augs])
print("Reading dataset")
d = ecvl.DLDataset(args.in_ds, args.batch_size, dataset_augs,
ecvl.ColorType.GRAY)
x = Tensor([args.batch_size, d.n_channels_, size[0], size[1]])
print("Testing")
d.SetSplit(ecvl.SplitType.test)
num_samples_test = len(d.GetSplit())
num_batches_test = num_samples_test // args.batch_size
evaluator = utils.Evaluator()
evaluator.ResetEval()
    n = 0
    for b in range(num_batches_test):
print("Batch {:d}/{:d} ".format(
b + 1, num_batches_test), end="", flush=True)
d.LoadBatch(x)
x.div_(255.0)
eddl.forward(net, [x])
if args.out_dir:
output = eddl.getOutput(out_sigm)
for k in range(args.batch_size):
img = output.select([str(k)])
img_I = ecvl.TensorToImage(img)
img_I.colortype_ = ecvl.ColorType.GRAY
img_I.channels_ = "xyc"
ecvl.Threshold(img_I, img_I, thresh, 255)
filename = d.samples_[d.GetSplit()[n]].location_[0]
head, tail = os.path.splitext(os.path.basename(filename))
bname = "{}.png".format(head)
output_fn = os.path.join(args.out_dir, bname)
ecvl.ImWrite(output_fn, img_I)
n += 1
print()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("in_ds", metavar="INPUT_DATASET")
parser.add_argument("ckpts", metavar='CHECKPOINTS_PATH',
default='./pneumothorax_segnetBN_adam_lr_0.0001_loss_'
'ce_size_512_epoch_44.bin')
parser.add_argument("--batch-size", type=int, metavar="INT", default=8)
parser.add_argument("--gpu", action="store_true")
parser.add_argument("--out-dir", metavar="DIR",
help="if set, save images in this directory")
main(parser.parse_args())
```
#### File: use_case_pipeline/python/utils.py
```python
import numpy as np
class Evaluator:
def __init__(self):
self.eps = 1e-06
self.buf = []
def ResetEval(self):
self.buf = []
def BinaryIoU(self, a, b, thresh=0.5):
intersection = np.logical_and(a >= thresh, b >= thresh).sum()
union = np.logical_or(a >= thresh, b >= thresh).sum()
rval = (intersection + self.eps) / (union + self.eps)
self.buf.append(rval)
return rval
def DiceCoefficient(self, a, b, thresh=0.5):
a = Threshold(a, thresh)
b = Threshold(b, thresh)
intersection = np.logical_and(a, b).sum()
rval = (2 * intersection + self.eps) / (a.sum() + b.sum() + self.eps)
self.buf.append(rval)
return rval
def MIoU(self):
if not self.buf:
return 0
return sum(self.buf) / len(self.buf)
MeanMetric = MIoU
def Threshold(a, thresh=0.5):
a[a >= thresh] = 1
a[a < thresh] = 0
return a
def ImageSqueeze(img):
k = img.dims_.index(1)
img.dims_ = [_ for i, _ in enumerate(img.dims_) if i != k]
img.strides_ = [_ for i, _ in enumerate(img.strides_) if i != k]
k = img.channels_.find("z")
img.channels_ = "".join([_ for i, _ in enumerate(img.channels_) if i != k])
``` |
{
"source": "JonaNeyra/api-account-transactions",
"score": 3
} |
#### File: src/domain/request.py
```python
class Eval:
def __init__(self, data):
self.data = data
def set(self, key, required=True):
if not self.data:
return None
if key not in self.data and required:
return None
if required and self.data[key] is None:
return None
return self.data[key] if key in self.data else None
class DepositEvent:
def __init__(self, data):
valid = Eval(data)
self.type = valid.set('type')
self.destination = valid.set('destination')
self.amount = valid.set('amount')
class WithdrawEvent:
def __init__(self, data):
valid = Eval(data)
self.type = valid.set('type')
self.origin = valid.set('origin')
self.amount = valid.set('amount')
class TransferEvent:
def __init__(self, data):
valid = Eval(data)
self.type = valid.set('type')
self.origin = valid.set('origin')
self.amount = valid.set('amount')
self.destination = valid.set('destination')
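# Illustrative usage (the payload below is hypothetical, not part of the API):
if __name__ == '__main__':
    event = DepositEvent({'type': 'deposit', 'destination': '100', 'amount': 10.0})
    print(event.type, event.destination, event.amount)
    partial = WithdrawEvent({'type': 'withdraw'})
    print(partial.origin, partial.amount)  # missing required keys resolve to None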
``` |
{
"source": "jonanone/APIProject",
"score": 3
} |
#### File: vagrant/restaurant_menu/webserver.py
```python
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import cgi
from database_helper import db_init, get_restaurants, add_restaurant
from database_helper import get_restaurant, edit_restaurant, delete_restaurant
session = db_init()
class webserverHandler(BaseHTTPRequestHandler):
def do_GET(self):
try:
# Hello World
if self.path.endswith("/hello"):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
output = ""
output += "<html><body>"
output += "Hello!"
output += "<form method='POST' enctype='multipart/form-data' \
action='/hello'><h2>What would you like me to say?</h2>\
<input name='message' type='text'><input type='submit' \
value='Submit'></form>"
output += "</body></html>"
self.wfile.write(output)
print output
return
# <NAME>
if self.path.endswith("/hola"):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
output = ""
output += "<html><body>"
output += "Hola! <a href='/hello'>\
Back to Hello</a>"
output += "<form method='POST' enctype='multipart/form-data' \
action='/hello'><h2>What would you like me to say?</h2>\
<input name='message' type='text'><input type='submit' \
value='Submit'></form>"
output += "</body></html>"
self.wfile.write(output)
print output
return
# List restaurants
if self.path.endswith("/restaurants"):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
restaurants = get_restaurants(session)
output = ""
output += "<html><body>"
output += "<h1>Restaurants</h1>"
output += "<a href='restaurants/new'>Add new restaurant</a>"
output += "<ul>"
for restaurant in restaurants:
output += "<li>"
output += "<p>" + restaurant.name + "</p>"
output += "<div><a href='restaurant/" + str(restaurant.id) + "/edit'>Edit</a></div>"
output += "<div><a href='restaurant/" + str(restaurant.id) + "/delete'>Delete</a></div>"
output += "</li>"
output += "</ul>"
output += "</body></html>"
self.wfile.write(output)
print output
return
# Add new restaurant
if self.path.endswith("/restaurants/new"):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
output = ""
output += "<html><body>"
output += "<h1>Make a new restaurant</h1>"
output += "<form method='POST' enctype='multipart/form-data' \
action='/restaurants/new'><input name='name' \
type='text'><input type='submit' value='Create'></form>"
output += "</body></html>"
self.wfile.write(output)
print output
return
# Edit a given restaurant
if self.path.endswith("/edit"):
restaurant_id = self.path.split('/')[2]
restaurant = get_restaurant(session, restaurant_id)
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
output = ""
output += "<html><body>"
output += "<h1>Edit a restaurant</h1>"
output += "<h2> %s </h2>" % restaurant.name
output += "<form method='POST' enctype='multipart/form-data' \
action='/restaurants/" + restaurant_id + "/edit'><input name='name' \
type='text'><input type='submit' value='Save'></form>"
output += "</body></html>"
self.wfile.write(output)
print output
return
# Delete a given restaurant
if self.path.endswith("/delete"):
restaurant_id = self.path.split('/')[2]
restaurant = get_restaurant(session, restaurant_id)
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
output = ""
output += "<html><body>"
output += "<h1>Are you sure you want to delete %s restaurant?</h1>" % restaurant.name
output += "<form method='POST' enctype='multipart/form-data' \
action='/restaurants/" + restaurant_id + "/delete'> \
<input type='submit' value='Delete'></form> \
<a href='/restaurants'>No, go back to restaurants</a>"
output += "</body></html>"
self.wfile.write(output)
print output
return
except IOError:
self.send_error(404, "File Not Found %s" % self.path)
def do_POST(self):
try:
# Say hello
if self.path.endswith("/hello"):
self.send_response(301)
self.end_headers()
ctype, pdict = cgi.parse_header(
self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
fields = cgi.parse_multipart(self.rfile, pdict)
messagecontent = fields.get('message')
output = ""
output += "<html><body>"
output += "<h2> Okay, how about this: </h2>"
output += "<h1> %s </h1>" % messagecontent[0]
output += "<form method='POST' enctype='multipart/form-data' \
action='/hello'><h2>What would you like me to say?</h2>\
<input name='message' type='text'><input type='submit' \
value='Submit'></form>"
output += "</body></html>"
self.wfile.write(output)
print output
# Add new restaurant POST
if self.path.endswith("/restaurants/new"):
ctype, pdict = cgi.parse_header(
self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
fields = cgi.parse_multipart(self.rfile, pdict)
data = {}
for field in fields:
data[field] = fields.get(field)[0]
new_restaurant = add_restaurant(session, data)
print new_restaurant.name + ' added.'
self.send_response(301)
self.send_header('Content-type', 'text/html')
self.send_header('Location', '/restaurants')
self.end_headers()
# Edit a given restaurant POST
if self.path.endswith("/edit"):
ctype, pdict = cgi.parse_header(
self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
fields = cgi.parse_multipart(self.rfile, pdict)
data = {}
for field in fields:
data[field] = fields.get(field)[0]
restaurant_id = self.path.split('/')[2]
edited_restaurant = edit_restaurant(session, restaurant_id, data)
print edited_restaurant.name + ' edited.'
self.send_response(301)
self.send_header('Content-type', 'text/html')
self.send_header('Location', '/restaurants')
self.end_headers()
# Delete a given restaurant POST
if self.path.endswith("/delete"):
restaurant_id = self.path.split('/')[2]
restaurant_deleted = delete_restaurant(session, restaurant_id)
if restaurant_deleted:
print 'Restaurant deleted.'
self.send_response(301)
self.send_header('Content-type', 'text/html')
self.send_header('Location', '/restaurants')
self.end_headers()
except:
pass
def main():
try:
port = 8080
server = HTTPServer(('', port), webserverHandler)
print "Web server running on port %s" % port
server.serve_forever()
except KeyboardInterrupt:
print "^C entered, stopping web server..."
server.socket.close()
if __name__ == '__main__':
main()
``` |
{
"source": "JonanOribe/Jarvis",
"score": 2
} |
#### File: jarviscli/plugins/electron.py
```python
import sys
import os
import threading
import subprocess
from plugin import plugin
from npm.bindings import npm_run
from subprocess import call
@plugin("electron")
def electron(jarvis, s):
try:
ElectronThread = threading.Thread(target=launchElectron)
NodeThread = threading.Thread(target=callToNode)
ElectronThread.start()
NodeThread.start()
NodeThread.join()
# ElectronThread.join()
# NodeThread.stop()
# ElectronThread.stop()
except:
print('GUI has been closed')
try:
ElectronThread.stop()
except:
print('Destroying threads')
def launchElectron():
# Open a browser for creating a Jarvis GUI with HTML,CSS and Javascript
print("Loading...")
pkg = npm_run('start')
def callToNode():
# Call to the Node chatbot
cwd = os.getcwd()
call(["node", cwd + "/examples/console-bot/"])
class StoppableThread(threading.Thread):
"""Thread class with a stop() method. The thread itself has to check
regularly for the stopped() condition."""
def __init__(self):
super(StoppableThread, self).__init__()
self._stop_event = threading.Event()
def stop(self):
self._stop_event.set()
def stopped(self):
return self._stop_event.is_set()
``` |
{
"source": "JonanOribe/Pybook",
"score": 3
} |
#### File: Pybook/PyBook/views.py
```python
from django.http import HttpResponse
from datetime import datetime
import json
def server_time(request):
now = datetime.now()
current_time = '{}{}'.format('Current server time is ',now)
return HttpResponse(current_time)
def numbers(request):
sorted_numbers = sorted([int(i) for i in request.GET['numbers'].split(',')])
data = {
'status': 'ok',
'numbers': sorted_numbers,
'message': 'Integers sorted successfully'
}
return HttpResponse(json.dumps(data, indent=4), content_type='application/json')
def say_hi(request, name, age):
if age<12:
message = 'Sorry {}, you are not allowed here'.format(name)
else:
message = 'Hello, {}! Welcome to PyBook'.format(name)
return HttpResponse(message)
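# Example requests (illustrative; assumes the corresponding URLconf wiring,
# which is not shown in this file):
#   GET /server_time            -> "Current server time is ..."
#   GET /numbers?numbers=3,1,2  -> {"status": "ok", "numbers": [1, 2, 3], ...}
#   GET /say_hi/Ana/25          -> "Hello, Ana! Welcome to PyBook"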
``` |
{
"source": "JonanOribe/PyRetailMarket",
"score": 3
} |
#### File: src/models/accesory.py
```python
class Accesory():
def __init__(self, accesory_type: int):
self._accesory_type = accesory_type
@property
def accesory_type(self):
        return self._accesory_type
@accesory_type.setter
def accesory_type(self, new_accesory_type: int):
        if isinstance(new_accesory_type, int):
self._accesory_type = new_accesory_type
else:
raise Exception("Invalid value for accesory_type")
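# Minimal usage sketch (illustrative only, added alongside the fixes above):
if __name__ == '__main__':
    accesory = Accesory(1)
    accesory.accesory_type = 2     # passes the int type check in the setter
    print(accesory.accesory_type)  # -> 2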
``` |
{
"source": "jonans/pylxd",
"score": 2
} |
#### File: pylxd/integration/test_cluster_members.py
```python
from integration.testing import IntegrationTestCase
class ClusterMemberTestCase(IntegrationTestCase):
def setUp(self):
super(ClusterMemberTestCase, self).setUp()
if not self.client.has_api_extension("clustering"):
self.skipTest("Required LXD API extension not available!")
class TestClusterMembers(ClusterMemberTestCase):
"""Tests for `Client.cluster_members.`"""
def test_get(self):
"""A cluster member is fetched by its name."""
members = self.client.cluster.members.all()
random_member_name = "%s" % members[0].server_name
random_member_url = "%s" % members[0].url
member = self.client.cluster.members.get(random_member_name)
new_url = "%s" % member.url
self.assertEqual(random_member_url, new_url)
```
#### File: pylxd/migration/testing.py
```python
import random
import string
import unittest
import uuid
from integration.busybox import create_busybox_image
from pylxd import exceptions
from pylxd.client import Client
class IntegrationTestCase(unittest.TestCase):
"""A base test case for pylxd integration tests."""
def setUp(self):
super(IntegrationTestCase, self).setUp()
self.client = Client()
self.lxd = self.client.api
def generate_object_name(self):
"""Generate a random object name."""
# Underscores are not allowed in container names.
test = self.id().split(".")[-1].replace("_", "")
rando = str(uuid.uuid1()).split("-")[-1]
return "{}-{}".format(test, rando)
def create_container(self):
"""Create a container in lxd."""
fingerprint, alias = self.create_image()
name = self.generate_object_name()
machine = {
"name": name,
"architecture": "2",
"profiles": ["default"],
"ephemeral": False,
"config": {"limits.cpu": "2"},
"source": {"type": "image", "alias": alias},
}
result = self.lxd["containers"].post(json=machine)
operation_uuid = result.json()["operation"].split("/")[-1]
result = self.lxd.operations[operation_uuid].wait.get()
self.addCleanup(self.delete_container, name)
return name
def delete_container(self, name, enforce=False):
"""Delete a container in lxd."""
# enforce is a hack. There's a race somewhere in the delete.
# To ensure we don't get an infinite loop, let's count.
count = 0
try:
result = self.lxd["containers"][name].delete()
except exceptions.LXDAPIException as e:
if e.response.status_code in (400, 404):
return
raise
while enforce and result.status_code == 404 and count < 10:
try:
result = self.lxd["containers"][name].delete()
except exceptions.LXDAPIException as e:
if e.response.status_code in (400, 404):
return
raise
count += 1
try:
operation_uuid = result.json()["operation"].split("/")[-1]
result = self.lxd.operations[operation_uuid].wait.get()
except KeyError:
pass # 404 cases are okay.
def create_image(self):
"""Create an image in lxd."""
path, fingerprint = create_busybox_image()
with open(path, "rb") as f:
headers = {
"X-LXD-Public": "1",
}
response = self.lxd.images.post(data=f.read(), headers=headers)
operation_uuid = response.json()["operation"].split("/")[-1]
self.lxd.operations[operation_uuid].wait.get()
alias = self.generate_object_name()
response = self.lxd.images.aliases.post(
json={"description": "", "target": fingerprint, "name": alias}
)
self.addCleanup(self.delete_image, fingerprint)
return fingerprint, alias
def delete_image(self, fingerprint):
"""Delete an image in lxd."""
try:
self.lxd.images[fingerprint].delete()
except exceptions.LXDAPIException as e:
if e.response.status_code == 404:
return
raise
def create_profile(self):
"""Create a profile."""
name = self.generate_object_name()
config = {"limits.memory": "1GB"}
self.lxd.profiles.post(json={"name": name, "config": config})
return name
def delete_profile(self, name):
"""Delete a profile."""
try:
self.lxd.profiles[name].delete()
except exceptions.LXDAPIException as e:
if e.response.status_code == 404:
return
raise
def create_network(self):
# get interface name in format xxx0
name = "".join(random.sample(string.ascii_lowercase, 3)) + "0"
self.lxd.networks.post(
json={
"name": name,
"config": {},
}
)
return name
def delete_network(self, name):
try:
self.lxd.networks[name].delete()
except exceptions.NotFound:
pass
def assertCommon(self, response):
"""Assert common LXD responses.
LXD responses are relatively standard. This function makes assertions
to all those standards.
"""
self.assertEqual(response.status_code, response.json()["status_code"])
self.assertEqual(
["metadata", "operation", "status", "status_code", "type"],
sorted(response.json().keys()),
)
```
#### File: deprecated/tests/test_profiles.py
```python
from unittest import mock
from ddt import data, ddt
from pylxd.deprecated import connection
from pylxd.deprecated.tests import LXDAPITestBase, annotated_data, fake_api
@mock.patch.object(
connection.LXDConnection, "get_object", return_value=(200, fake_api.fake_profile())
)
class LXDAPIProfilesTestObject(LXDAPITestBase):
def test_list_profiles(self, ms):
ms.return_value = ("200", fake_api.fake_profile_list())
self.assertEqual(["fake-profile"], self.lxd.profile_list())
ms.assert_called_with("GET", "/1.0/profiles")
def test_profile_show(self, ms):
self.assertEqual(ms.return_value, self.lxd.profile_show("fake-profile"))
ms.assert_called_with("GET", "/1.0/profiles/fake-profile")
@ddt
@mock.patch.object(connection.LXDConnection, "get_status", return_value=True)
class LXDAPIProfilesTestStatus(LXDAPITestBase):
@data(True, False)
def test_profile_defined(self, defined, ms):
ms.return_value = defined
self.assertEqual(defined, self.lxd.profile_defined("fake-profile"))
ms.assert_called_with("GET", "/1.0/profiles/fake-profile")
@annotated_data(
("create", "POST", "", ("fake config",), ('"fake config"',)),
(
"update",
"PUT",
"/fake-profile",
(
"fake-profile",
"fake config",
),
('"fake config"',),
),
(
"rename",
"POST",
"/fake-profile",
(
"fake-profile",
"fake config",
),
('"fake config"',),
),
("delete", "DELETE", "/fake-profile", ("fake-profile",), ()),
)
def test_profile_operations(self, method, http, path, args, call_args, ms):
self.assertTrue(getattr(self.lxd, "profile_" + method)(*args))
ms.assert_called_with(http, "/1.0/profiles" + path, *call_args)
```
#### File: tests/models/test_project.py
```python
import json
from pylxd import exceptions, models
from pylxd.tests import testing
class TestProject(testing.PyLXDTestCase):
"""Tests for pylxd.models.Project."""
def test_get(self):
"""A project is fetched."""
name = "test-project"
a_project = models.Project.get(self.client, name)
self.assertEqual(name, a_project.name)
def test_get_not_found(self):
"""LXDAPIException is raised on unknown project."""
def not_found(request, context):
context.status_code = 404
return json.dumps(
{"type": "error", "error": "Not found", "error_code": 404}
)
self.add_rule(
{
"text": not_found,
"method": "GET",
"url": r"^http://pylxd.test/1.0/projects/test-project$",
}
)
self.assertRaises(
exceptions.LXDAPIException, models.Project.get, self.client, "test-project"
)
def test_get_error(self):
"""LXDAPIException is raised on get error."""
def error(request, context):
context.status_code = 500
return json.dumps(
{"type": "error", "error": "Not found", "error_code": 500}
)
self.add_rule(
{
"text": error,
"method": "GET",
"url": r"^http://pylxd.test/1.0/projects/test-project$",
}
)
self.assertRaises(
exceptions.LXDAPIException, models.Project.get, self.client, "test-project"
)
def test_exists(self):
name = "test-project"
self.assertTrue(models.Project.exists(self.client, name))
def test_not_exists(self):
def not_found(request, context):
context.status_code = 404
return json.dumps(
{"type": "error", "error": "Not found", "error_code": 404}
)
self.add_rule(
{
"text": not_found,
"method": "GET",
"url": r"^http://pylxd.test/1.0/projects/test-project$",
}
)
name = "test-project"
self.assertFalse(models.Project.exists(self.client, name))
def test_all(self):
"""A list of all projects is returned."""
projects = models.Project.all(self.client)
self.assertEqual(1, len(projects))
def test_create(self):
"""A new project is created."""
name = "new-project"
description = "new project is new"
config = {
"features.images": "true",
}
a_project = models.Project.create(
self.client, name="new-project", config=config, description=description
)
self.assertIsInstance(a_project, models.Project)
self.assertEqual(name, a_project.name)
self.assertEqual(config, a_project.config)
self.assertEqual(description, a_project.description)
def test_update(self):
"""A project is updated."""
a_project = models.Project.get(self.client, "test-project")
a_project.save()
self.assertEqual({"features.images": "true"}, a_project.config)
def test_fetch(self):
"""A partially fetched project is made complete."""
a_project = self.client.projects.all()[0]
a_project.sync()
self.assertEqual("new project is new", a_project.description)
def test_fetch_notfound(self):
"""LXDAPIException is raised on bogus project fetches."""
def not_found(request, context):
context.status_code = 404
return json.dumps(
{"type": "error", "error": "Not found", "error_code": 404}
)
self.add_rule(
{
"text": not_found,
"method": "GET",
"url": r"^http://pylxd.test/1.0/projects/test-project$",
}
)
a_project = models.Project(self.client, name="test-project")
self.assertRaises(exceptions.LXDAPIException, a_project.sync)
def test_fetch_error(self):
"""LXDAPIException is raised on fetch error."""
def error(request, context):
context.status_code = 500
return json.dumps(
{"type": "error", "error": "Not found", "error_code": 500}
)
self.add_rule(
{
"text": error,
"method": "GET",
"url": r"^http://pylxd.test/1.0/projects/test-project$",
}
)
a_project = models.Project(self.client, name="test-project")
self.assertRaises(exceptions.LXDAPIException, a_project.sync)
def test_delete(self):
"""A project is deleted."""
a_project = self.client.projects.all()[0]
a_project.delete()
``` |
{
"source": "jonanx779/python-stuffs",
"score": 4
} |
#### File: python-stuffs/decorators/decorators.py
```python
import time
import functools
import urllib3
# Following examples are related on howto create class based decorators
# in some cases we can use functools, which easily let us create a useful
# decorator, or we can create a more complex class based decorators which
# lets us pass parameters as needed.
class SlowDown(object):
"""This class based decorator uses the functools module, which is useful
if you are planning to create a simple decorator, with no params.
It is very important to set these two methods: constructor, and callable
__init__ and __call__
This is the first example
"""
def __init__(self, f_name):
functools.update_wrapper(self, f_name)
self.function_name = f_name
self.rate = 1
def __call__(self, *args, **kwargs):
self.function_name(*args, **kwargs)
print(f"Sleeping time comes...{args}")
time.sleep(1)
print(f"Waking up time...")
return self.function_name
class CountCalls(object):
"""This class based decorator uses the functools module, which is useful
if you are planning to create a simple decorator, with no params.
It is very important to set these two methods: constructor, and callable
__init__ and __call__
This is the second example
"""
def __init__(self, f_name):
functools.update_wrapper(self, f_name)
self.function_name = f_name
self.count_calls = 0
def __call__(self, *args, **kwargs):
self.count_calls += 1
print(f"Call {self.count_calls} of {self.function_name.__name__!r}")
return self.function_name(*args, **kwargs)
class CacheCalls(object):
"""This class based decorator uses the functools module, which is useful
if you are planning to create a simple decorator, with no params.
It is very important to set these two methods: constructor, and callable
__init__ and __call__
This is the third example
"""
def __init__(self, f_name):
functools.update_wrapper(self, f_name)
self.function_name = f_name
self.cache_calls = {}
def __call__(self, *args, **kwargs):
cache_key = args + tuple(kwargs.items())
if cache_key not in self.cache_calls:
self.cache_calls[cache_key] = self.function_name(*args, **kwargs)
return self.cache_calls[cache_key]
class ValidateJSON(object):
""" Class based decorator with or without params, we prepare
the constructor for receiving params as needed or not.
As the examples above, this needs to set two important methods
__init__ (to set initial config) and __call__ (last one to make this callable)
The call method also is prepared to receive the params (through the wrapper
funct) coming from the decorated function, and also we can use params that
comes from the decorator constructor.
"""
def __init__(self, *args, **kwargs):
# print('__init__', args, kwargs)
self.args = args
self.kwargs = kwargs
def __call__(self, f_name):
def wrapper(*args, **kwargs):
# print('__call__', args, kwargs)
print(f"Preprocessing {self.args}, {self.kwargs}")
if args:
print(f"wrapper: {args}")
r = f_name(*args, **kwargs)
print(f"Postprocessing", r)
return r
return wrapper
# Exercise 1: delaying 1 sec a call
@SlowDown
def countdown(from_number):
if from_number < 1:
print("Liftoff!")
else:
print(f"Hola {from_number}")
countdown(from_number - 1)
# Exercise 2: counting how many calls a function receives
@CountCalls
def call_counter():
print("Calling counter!")
# We can even create our own cache decorator (we created this one just as an example)
#@CacheCalls
# It is better to use the one provided by functools
@functools.lru_cache(maxsize=4)
#@CountCalls
def fibonacci(num):
print(f"Calculating fibonacci({num})")
if num < 2:
return num
return fibonacci(num - 1) + fibonacci(num - 2)
_REQUEST_DATA = {
"username": "Jonas",
"password": "password"
}
name = "Jonas"
# So you can choose to send N params and/or keywords
#@ValidateJSON("student_id")
# Or you can just call the decorator without params
@ValidateJSON()
def update_grade(*args, **kwargs):
print('call my_function', args, kwargs)
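# A minimal usage sketch (added for illustration; the argument values below
# are hypothetical, not part of the original module):
if __name__ == "__main__":
    countdown(3)              # SlowDown pauses ~1s between recursive prints
    call_counter()            # CountCalls -> "Call 1 of 'call_counter'"
    call_counter()            # CountCalls -> "Call 2 of 'call_counter'"
    print(fibonacci(10))      # lru_cache avoids recomputing subproblems
    update_grade(name, data=_REQUEST_DATA)  # ValidateJSON prints pre/post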
``` |
{
"source": "jonaprieto/agda-pkg",
"score": 2
} |
#### File: apkg/commands/write_defaults.py
```python
import click
from os import access, W_OK
from pony.orm import db_session, select
from ..__version__ import __version__
from .init import init
from ..config import REPO, AGDA_DEFAULTS_PATH, AGDA_LIBRARIES_PATH
from ..service.logging import logger, clog
from ..service.database import Library , LibraryVersion
# ----------------------------------------------------------------------------
@db_session
def getLibraries():
return select(l for l in LibraryVersion if l.installed)[:]
@db_session
def getDefaultLibraries():
return select(l for l in Library if l.installed and l.default)[:]
@click.group()
def write_defaults(): pass
@write_defaults.command()
@click.option('--yes'
, type=bool
, is_flag=True
, help='Yes for everything.')
@clog.simple_verbosity_option(logger)
@click.pass_context
def write_defaults(ctx, yes):
"""Create/update 'defaults' and 'libraries' files for Agda."""
no_write_permission_msg = '[!] The current user has no permission to modify:'
confirm_overwrite_msg = 'already exists\nDo you want to overwrite it?'
header = "-- File generated by Agda-Pkg v{}\n".format(__version__)
overwrite_confirm = lambda f : click.confirm("[!] {} {}".format(f.as_posix(),confirm_overwrite_msg))
write_lpath, write_dpath = True, True
lpath = AGDA_LIBRARIES_PATH
dpath = AGDA_DEFAULTS_PATH
if not yes and (lpath.exists() and not overwrite_confirm(lpath)):
write_lpath = False
lpath.touch(exist_ok=True)
if write_lpath and access(lpath, W_OK):
try:
lpath.write_text(header + \
'\n'.join([v.agdaLibFilePath.as_posix() for v in getLibraries()]) + '\n')
click.echo("Updated ({})".format(lpath.as_posix()))
except Exception as e:
logger.error(e)
elif write_lpath:
click.echo(no_write_permission_msg)
click.echo(" {}".format(lpath.as_posix()))
if not yes and (dpath.exists() and not overwrite_confirm(dpath)):
write_dpath = False
dpath.touch(exist_ok=True)
if write_dpath and access(dpath, W_OK):
try:
dpath.write_text(header + \
'\n'.join(lib.name for lib in getDefaultLibraries()) + '\n')
click.echo("Updated ({})".format(dpath.as_posix()))
except Exception as e:
logger.error(e)
elif write_dpath:
click.echo(no_write_permission_msg)
click.echo(" {}".format(dpath.as_posix()))
``` |
{
"source": "jonaprieto/python-atxt",
"score": 2
} |
#### File: src/atxt/check.py
```python
from __future__ import print_function
import os
from distutils.spawn import find_executable
from log_conf import Logger
from osinfo import osinfo
import subprocess as sub
log = Logger.log
vendors = os.path.dirname(os.path.abspath(__file__))
vendors = os.path.join(vendors, 'vendors')
def check_os():
info = osinfo.OSInfo()
return info.system
def check_office():
system = check_os()
if system != 'Windows':
return False
try:
from win32com import client
msword = client.DispatchEx('Word.Application')
msword.Visible = False
msword.Quit()
        log.debug('Successful Dispatching of Word.Application')
        return True
    except Exception as e:
        log.warning(e)
        return False
def path_program(name, path=None):
system = check_os()
if not path:
path = vendors
if system == 'Windows':
name = name + '.exe' if not name.endswith('.exe') else name
path = os.path.join(vendors, system)
else:
path = os.path.join(vendors, 'Unix')
p = find_executable(name)
return p if p else find_executable(name, path=path)
def path_pdftotext():
return path_program('pdftotext')
def path_pdftopng():
return path_program('pdftopng')
def path_pdffonts():
return path_program('pdffonts')
def path_tesseract():
return path_program('tesseract')
def path_antiword():
set_antiword()
if check_os() != 'Windows':
return path_program('antiword')
path = os.path.join('C:\\', 'antiword\\')
if os.path.exists(path) and find_executable('antiword.exe', path=path):
return find_executable('antiword.exe', path=path)
path = os.path.join(vendors, 'Windows', 'antiword')
return find_executable('antiword.exe', path=path)
def set_antiword():
if check_os() == 'Windows':
p = os.path.join('C:\\', 'antiword\\')
path_anti = p
if 'PATH' in os.environ.keys():
if os.environ['PATH'].find('antiword') < 0:
cmd = ['setx', 'PATH', '{path}{anti}'.format(
path=os.environ['PATH'], anti=path_anti)]
try:
sub.call(cmd)
log.info('antiword was added to PATH variable')
                except Exception as e:
log.warning(e)
if 'HOME' not in os.environ.keys():
home = os.path.expanduser('~')
cmd = ['setx', 'HOME', home]
try:
sub.call(cmd)
log.info('HOME was added as new environ variable')
            except Exception as e:
log.warning(e)
def check():
p = path_pdftotext()
if p:
log.debug('successful pdftotext: %s' % p)
else:
log.warning(
'pdftotext is missing. It could not be able to treat some pdf files.')
p = path_pdftopng()
if p:
log.debug('successful pdftopng: %s' % p)
else:
log.warning(
            'pdftopng is missing. (OCR will not be available for pdf files.)')
p = path_pdffonts()
if p:
log.debug('successful pdffonts: %s' % p)
else:
log.warning(
            'pdffonts is missing. (OCR will not be available)')
p = path_tesseract()
if p:
log.debug('successful ocr: %s' % p)
else:
log.warning(
            'tesseract is missing. OCR recognition will not be available.')
p = path_antiword()
if p:
log.debug('successful antiword: %s' % p)
else:
log.warning('antiword is missing. .DOC will not be available.')
    if not check_office() and check_os() == 'Windows':
log.warning(
'PyWin32 or Microsoft Office Suite is not installed or not available.')
```
#### File: atxt/formats/html.py
```python
from _utils import raw_data, save_raw_data, find_encoding
from atxt.log_conf import Logger
log = Logger.log
try:
import html2text
except ImportError:
log.critical('html2text module not installed')
log.critical('please: pip install html2text')
raise Exception('html2text module not installed')
__all__ = ['html']
def html(from_file, to_txt, opts=None):
log.debug('html2txt starting')
h = html2text.HTML2Text()
h.split_next_td = False
h.td_count = 0
h.table_start = False
h.unicode_snob = 0
h.escape_snob = 0
h.links_each_paragraph = 0
h.body_width = 78
h.skip_internal_links = True
h.inline_links = True
h.protect_links = True
h.ignore_links = True
h.ignore_images = True
h.images_to_alt = True
h.ignore_emphasis = True
h.bypass_tables = 1
h.google_doc = False
h.ul_item_mark = '*'
h.emphasis_mark = '_'
h.strong_mark = '**'
h.single_line_break = True
_encoding = find_encoding(from_file.path)
html = raw_data(from_file.path, _encoding)
if not html:
return
text = h.handle(html)
return save_raw_data(to_txt.path, text, _encoding)
```
#### File: atxt/formats/jpg.py
```python
import os
from _utils import raw_data, save_raw_data
from atxt.vendors import tesseract
from atxt.log_conf import Logger
from atxt.utils import remove
log = Logger.log
__all__ = ['jpg', 'imagen']
def jpg(from_file, to_txt, opts):
return imagen(from_file, to_txt, opts)
def imagen(from_file, to_txt, opts):
outputpath = os.path.join(from_file.dirname, 'output.txt')
log.info('tesseract is processing: {}'.format(from_file.path))
tesseract(from_file.path, None, opts)
text = ''
try:
text = raw_data(outputpath)
    except Exception as e:
log.critical(e)
remove(outputpath)
return save_raw_data(to_txt.path, text)
```
#### File: atxt/formats/_utils.py
```python
import os
import chardet
import codecs
from atxt.log_conf import Logger
log = Logger.log
__all__ = ['raw_data', 'find_encoding', 'save_raw_data']
def raw_data(filepath, encoding=None):
if filepath and os.path.exists(filepath):
if not os.access(filepath, os.R_OK):
log.warning('file has not read permission')
return
rawdata = None
if not encoding:
encoding = find_encoding(filepath)
try:
log.debug('trying to read file with encoding: %s' % encoding)
f = codecs.open(filepath, 'r', encoding=encoding)
rawdata = f.read()
f.close()
        except Exception as e:
            log.critical(e)
            try:
                log.debug('trying to read without encoding: %s' % e)
f = codecs.open(filepath, mode='r')
rawdata = f.read()
f.close()
            except Exception as ee:
log.critical('(second try)')
log.critical(ee)
return rawdata
def find_encoding(filepath):
if filepath:
rawdata = codecs.open(filepath, mode="r").read()
result = chardet.detect(rawdata)
encoding = result['encoding']
log.debug('looking for the correct encoding: %s' % encoding)
return encoding
def save_raw_data(filepath, text, encoding='utf-8'):
log.debug('saving text data')
if filepath:
try:
f = codecs.open(filepath, mode='w', encoding=encoding)
if isinstance(text, list):
for line in text:
line = line.replace('\n', os.linesep)
f.write(line + os.linesep)
else:
f.write(text)
f.close()
return filepath
        except Exception as e:
log.critical(e)
try:
if encoding:
f = codecs.open(filepath, mode='w')
f.write(text)
f.close()
else:
f = open(filepath, 'w')
f.write(text)
return filepath
            except Exception as ee:
log.critical(ee)
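# Minimal round-trip sketch (illustrative only; 'some.txt' and 'copy.txt'
# are hypothetical paths, not part of the original module):
#
#   enc = find_encoding('some.txt')       # guess the encoding with chardet
#   text = raw_data('some.txt', enc)      # read using that encoding
#   save_raw_data('copy.txt', text, enc)  # write the text back out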
```
#### File: atxt/formats/xlsx.py
```python
import xlrd
import os
from _utils import save_raw_data
def xlsx(from_file, to_txt, opts):
filename = from_file.path
workbook = xlrd.open_workbook(filename)
sheets_name = workbook.sheet_names()
output = os.linesep
for names in sheets_name:
worksheet = workbook.sheet_by_name(names)
num_rows = worksheet.nrows
num_cells = worksheet.ncols
for curr_row in range(num_rows):
row = worksheet.row(curr_row)
new_output = []
for index_col in xrange(num_cells):
value = worksheet.cell_value(curr_row, index_col)
if value:
if isinstance(value, (int, float)):
value = unicode(value)
new_output.append(value)
if new_output:
output += u' '.join(new_output) + unicode(os.linesep)
return save_raw_data(to_txt.path, output)
```
#### File: atxt/gui/window.py
```python
import logging
import os
import sys
from PySide import QtGui, QtCore
from PySide.QtGui import (
QFileDialog,
QGridLayout,
QGroupBox,
QCheckBox,
QTextBrowser,
QPushButton,
QMessageBox
)
from atxt.formats import supported_formats
from atxt.log_conf import Logger
from atxt.utils import parser_opts, extract_ext, remove
from atxt.check import check_os
from constants import *
from start import Start
from scan import Scan
log = Logger.log
path_home = os.path.expanduser('~')
checked = QtCore.Qt.Checked
unchecked = QtCore.Qt.Unchecked
class QtHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
def emit(self, record):
record = self.format(record)
if record:
XStream.stdout().write('%s\n' % record)
handler = QtHandler()
log.addHandler(handler)
class XStream(QtCore.QObject):
""" http://stackoverflow.com/questions/24469662/
how-to-redirect-logger-output-into-pyqt-text-widget"""
_stdout = None
_stderr = None
messageWritten = QtCore.Signal(str)
def flush(self):
pass
def fileno(self):
return -1
def write(self, msg):
if (not self.signalsBlocked()):
self.messageWritten.emit(msg)
@staticmethod
def stdout():
if (not XStream._stdout):
XStream._stdout = XStream()
sys.stdout = XStream._stdout
return XStream._stdout
@staticmethod
def stderr():
if (not XStream._stderr):
XStream._stderr = XStream()
sys.stderr = XStream._stderr
return XStream._stderr
class Window(QtGui.QWidget):
layout = QGridLayout()
_layout1 = QtGui.QVBoxLayout()
_layout2 = QtGui.QVBoxLayout()
totalfiles = 0
def __init__(self):
QtGui.QWidget.__init__(self)
log.debug('GUI aTXT')
self._set_layout_source()
self._set_layout_save()
self._set_layout_console()
self._set_layout2()
self._connect_acctions()
box = QGroupBox(LABEL_BOX_LAYOUT1)
box.setLayout(self._layout1)
self.layout.addWidget(box, 0, 0)
box = QGroupBox(LABEL_BOX_LAYOUT2)
box.setLayout(self._layout2)
self.layout.addWidget(box, 0, 1)
self.setLayout(self.layout)
XStream.stdout().messageWritten.connect(self._cursor_visible)
XStream.stderr().messageWritten.connect(self._cursor_visible)
def _cursor_visible(self, value):
self._console.insertPlainText(value)
self._console.ensureCursorVisible()
def _set_layout_source(self):
self.setWindowTitle(TITLE_WINDOW)
self.setFixedSize(850, 400)
self.setContentsMargins(15, 15, 15, 15)
self._layout1 = QtGui.QVBoxLayout()
self._layout1.addStretch(1)
self._btn_source = QtGui.QPushButton(BTN_BROWSER)
self._edt_source = QtGui.QLineEdit()
self._edt_source.setText(path_home)
self._edt_source.setFixedSize(330, 20)
self._edt_source.setAlignment(QtCore.Qt.AlignRight)
self._depth = QtGui.QSpinBox()
self._depth.setToolTip(TOOLTIP_DEPTH)
self._depth.setMinimum(0)
self._depth.setMaximum(100)
self._depth.setFixedSize(50, 25)
self._label1 = QtGui.QLabel()
self._label1.setText(LABEL_DEPTH)
box = QGroupBox(LABEL_BOX_DIRECTORY)
ly = QGridLayout()
ly.addWidget(self._btn_source, 0, 0)
ly.addWidget(self._edt_source, 0, 1)
ly.addWidget(self._label1, 0, 2)
ly.addWidget(self._depth, 0, 3)
box.setLayout(ly)
self._layout1.addWidget(box)
def _set_layout_save(self):
self._label_save = QtGui.QLabel(MSG_SAVE_IN)
self._edt_save = QtGui.QLineEdit("")
self._edt_save.setFixedSize(350, 20)
self._edt_save.setToolTip(TOOLTIP_SAVEIN)
self._edt_save.setText(path_home)
self._edt_save.setAlignment(QtCore.Qt.AlignRight)
self._btn2 = QtGui.QPushButton(BTN_BROWSER)
self._btn2.clicked.connect(self.set_directory_save_in)
self._check_overwrite = QtGui.QCheckBox(LABEL_OVERWRITE)
self._check_overwrite.setToolTip(TOOLTIP_OVERWRITE)
self._check_overwrite.setCheckState(checked)
self._check_ocr = QtGui.QCheckBox('OCR')
self._check_ocr.setCheckState(unchecked)
        self._label_lang = QtGui.QLabel('OCR language: ')
self._edt_lang = QtGui.QLineEdit()
self._edt_lang.setText('spa')
self._edt_lang.setFixedSize(80, 20)
self._edt_lang.setAlignment(QtCore.Qt.AlignRight)
self._check_ocr_necessary = QtGui.QCheckBox("Smart OCR")
self._check_ocr_necessary.setToolTip(TOOLTIP_OCR_NECESSARY)
self._check_ocr_necessary.setCheckState(unchecked)
# self._check_use_temp = QtGui.QCheckBox(LABEL_USE_TEMP)
# self._check_use_temp.setToolTip(TOOLTIP_USE_TEMP)
# self._check_use_temp.setCheckState(unchecked)
box = QGroupBox(LABEL_BOX_SAVE_IN)
box.setToolTip(TOOLTIP_BOX_SAVEIN)
ly = QGridLayout()
ly.addWidget(self._btn2, 0, 0)
ly.addWidget(self._edt_save, 0, 1)
ly.addWidget(self._check_overwrite, 0, 4)
ly.addWidget(self._check_ocr, 1, 0)
ly.addWidget(self._label_lang, 1, 1)
ly.addWidget(self._edt_lang, 1, 2)
ly.addWidget(self._check_ocr_necessary, 1, 3)
# ly.addWidget(self._check_use_temp, 0, 5)
box.setLayout(ly)
self._layout1.addWidget(box)
def _set_layout_console(self):
self._console = QTextBrowser(self)
frameStyle = QtGui.QFrame.Sunken | QtGui.QFrame.Panel
self._console.setFrameStyle(frameStyle)
# DETAILS
# self._progress_bar = QtGui.QProgressBar()
# self._progress_bar.setMinimum(0)
# self._progress_bar.setMaximum(100)
self._layout1.addWidget(self._console)
# self._layout1.addWidget(self._progress_bar)
self._btn_save_log = QtGui.QPushButton(BTN_SAVE_LOG)
self._btn_save_log.clicked.connect(self._save_log)
self._layout1.addWidget(self._btn_save_log)
def _save_log(self):
save_log_dir = QFileDialog.getSaveFileName(
self, "Save Log File", "", "Text File (*.txt)")
try:
remove(save_log_dir[0])
        except Exception as e:
log.error(e)
f = QtCore.QFile(save_log_dir[0])
try:
if f.open(QtCore.QIODevice.ReadWrite):
stream = QtCore.QTextStream(f)
text = self._console.toPlainText()
text = text.replace('\n', os.linesep)
exec "stream << text"
f.flush()
f.close()
        except Exception as e:
log.critical(e)
# def _cursor_end(self, value=None):
# self._console.moveCursor(QtGui.QTextCursor.End)
def _set_layout2(self):
self.formats = []
for ext in supported_formats():
self.formats.append((ext, QCheckBox(str(ext))))
box = QGroupBox(LABEL_BOX_FORMATS)
ly = QGridLayout()
for ext, widget in self.formats:
ly.addWidget(widget)
box.setLayout(ly)
self._layout2.addWidget(box)
# ACTIONS
# self._btn_stop = QPushButton("Stop")
self._btn_start = QPushButton("Start")
self._btn_scan = QPushButton("Scan")
self._btn_scan.setEnabled(True)
self._btn_scan.setToolTip(TOOLTIP_SCAN)
# self._btn_stop.setEnabled(False)
self._btn_start.setEnabled(True)
box = QGroupBox(LABEL_BOX_ACTIONS)
ly = QGridLayout()
ly.setColumnStretch(1, 1)
# ly.addWidget(self._btn_stop, 1, 0)
ly.addWidget(self._btn_scan, 0, 0)
ly.addWidget(self._btn_start, 1, 0)
box.setLayout(ly)
self._layout2.addWidget(box)
def closeEvent(self, event):
log.debug("Exit")
event.accept()
def on_show_info(self, value):
QtGui.QMessageBox.information(self, "Information", value)
def set_source(self):
dialog = QFileDialog(self)
if check_os() == 'Windows':
dialog.setFileMode(QFileDialog.Directory)
# dialog.setOption(QFileDialog.DontUseNativeDialog)
dialog.setOption(QFileDialog.ShowDirsOnly)
else:
dialog.setFileMode(QFileDialog.AnyFile)
dialog.setViewMode(QFileDialog.Detail)
dialog.setDirectory(path_home)
if dialog.exec_():
paths = dialog.selectedFiles()
for f in paths:
if os.path.isdir(f):
self._btn_scan.setEnabled(True)
self._edt_save.setText(f)
elif os.path.isfile(f):
log.debug('--from %s' % os.path.dirname(f))
log.debug('file: %s' % os.path.basename(f))
self._btn_scan.setEnabled(False)
self._edt_save.setText(os.path.dirname(f))
ext_file = extract_ext(f)
for ext, widget in self.formats:
if ext == ext_file:
widget.setCheckState(checked)
log.debug('--depth: %s' % self._depth.text())
self._edt_source.setText(f)
self._edt_save.setText(f)
def set_directory_save_in(self):
options = QFileDialog.DontResolveSymlinks | QFileDialog.ShowDirsOnly
directory = QFileDialog.getExistingDirectory(self,
MSG_SAVE_IN,
self._edt_save.text(), options)
if directory:
self._edt_save.setText(directory)
log.debug('--to: %s' % directory)
def options(self):
f = self._edt_source.text()
if not os.path.exists(f):
            self.on_show_info('Choose a valid source!')
return
ext_file = None
if os.path.isfile(f):
ext_file = extract_ext(f)
tfiles = []
for ext, widget in self.formats:
if ext == ext_file:
widget.setCheckState(checked)
if widget.isChecked():
tfiles.append(ext)
if self._check_ocr_necessary.isChecked():
self._check_ocr.setCheckState(unchecked)
only_ocr = self._check_ocr.isChecked() and not self._check_ocr_necessary.isChecked()
necessary_ocr = not self._check_ocr.isChecked() and self._check_ocr_necessary.isChecked()
opts = {
'<source>': [f],
'--to': self._edt_save.text(),
'-o': self._check_overwrite.isChecked(),
            '--ocr': only_ocr,
'--ocr-necessary': necessary_ocr,
'--without-ocr': not only_ocr,
'--use-temp': False,
# '--use-temp': self._check_use_temp.isChecked(),
'--depth': int(self._depth.text()),
'-l': self._edt_lang.text(),
'tfiles': tfiles,
}
return parser_opts(opts)
def _connect_acctions(self):
self._btn_source.clicked.connect(self.set_source)
self._btn_scan.clicked.connect(self._scan)
self._btn_start.clicked.connect(self._start)
def _scan(self):
opts = self.options()
if not opts['tfiles']:
QtGui.QMessageBox.information(
self, "Information", NONE_EXT_CHOOSED)
log.debug(NONE_EXT_CHOOSED)
return
flags = QMessageBox.StandardButton.Yes
flags |= QMessageBox.StandardButton.No
question = WARNING_LONG_PROCESS
response = QMessageBox.question(self, "Question", question, flags)
if response == QMessageBox.No:
log.info("Scaning cancelled")
return
log.debug("Starting process")
log.warning(TOOLTIP_SCAN)
self._btn_start.setEnabled(False)
self._thread = Scan(self)
self._thread.start()
self._btn_start.setEnabled(True)
log.info('')
log.info('')
def _stop(self):
log.debug('_stop()')
if hasattr(self, "_thread"):
try:
self._thread.finished()
self._thread.deleteLater()
self._thread.FLAG = False
del self._thread
            except Exception as e:
                log.debug('could not delete thread: %s' % e)
def _start(self):
log.debug('_start()')
flags = QMessageBox.StandardButton.Yes
flags |= QMessageBox.StandardButton.No
question = WARNING_LONG_PROCESS
response = QMessageBox.question(self, "Question", question, flags)
if response == QMessageBox.Yes:
log.debug("Starting process")
        elif response == QMessageBox.No:
log.debug("Starting cancelled")
return
self._btn_start.setEnabled(False)
self._btn_scan.setEnabled(False)
self._thread = Start(self)
self._thread.start()
self._thread.finished.connect(self._thread_finished)
self._thread.terminated.connect(self._thread_finished)
def _thread_finished(self):
self._btn_start.setEnabled(True)
self._btn_scan.setEnabled(True)
``` |
{
"source": "jonaqp/heroku",
"score": 2
} |
#### File: apps/container/views.py
```python
from json import loads
from django.contrib import messages
from django.contrib.auth import get_user_model
from core import constants as core_constants
from core.mixins import TemplateLoginRequiredMixin
from .forms import ContainerForm, TripForm
from .models import Container, Trip
User = get_user_model()
class ViewerBoatView(TemplateLoginRequiredMixin):
template_name = 'container/viewer/index.html'
def get(self, request, *args, **kwargs):
self.boats = Container.objects.all().values_list('identifier_mac', flat=True)
print(self.boats)
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['boats'] = self.boats
return context
class EditorBoatView(TemplateLoginRequiredMixin):
template_name = 'container/editor/index.html'
def get(self, request, *args, **kwargs):
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
return context
class ContainerView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/container.html'
def __init__(self, **kwargs):
self.form_container = None
self.container_all = None
super().__init__(**kwargs)
def dispatch(self, request, *args, **kwargs):
self.container_all = Container.objects.all()
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
self.form_container = ContainerForm(auto_id='id_container_%s')
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
self.form_container = ContainerForm(
data=request.POST, auto_id='id_container_%s')
if self.form_container.is_valid():
self.form_container.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.warning(request, core_constants.MESSAGE_TAGS['warning'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_container'] = self.form_container
context['list_containers'] = self.container_all
return context
class ContainerEditView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/container_add.html'
def __init__(self, **kwargs):
self.form_container = None
self.container = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
container = request.GET['container_id']
self.container = Container.objects.get(pk=container)
self.form_container = ContainerForm(
auto_id='id_container_%s', instance=self.container)
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
data = loads(request.body.decode('utf-8'))
data_container_pk = data['form_pk']
data_form_container = data['form']
self.container = Container.objects.get(pk=data_container_pk)
self.form_container = ContainerForm(
data_form_container, auto_id='id_container_%s', instance=self.container)
if self.form_container.is_valid():
self.form_container.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.error(request, core_constants.MESSAGE_TAGS['error'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_container'] = self.form_container
context['form_pk'] = self.container.id
context['btn_edit'] = core_constants.BTN_EDIT
return context
class ContainerListView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/container_list.html'
def __init__(self, **kwargs):
self.container_all = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
self.container_all = Container.objects.all()
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['list_containers'] = self.container_all
return context
class TripView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/trip.html'
def __init__(self, **kwargs):
self.form_trip = None
self.trip_all = None
super().__init__(**kwargs)
def dispatch(self, request, *args, **kwargs):
self.trip_all = Trip.objects.all()
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
self.form_trip = TripForm(auto_id='id_trip_%s')
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
self.form_trip = TripForm(
data=request.POST, auto_id='id_trip_%s')
if self.form_trip.is_valid():
self.form_trip.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.warning(request, core_constants.MESSAGE_TAGS['warning'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_trip'] = self.form_trip
context['list_trips'] = self.trip_all
return context
class TripEditView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/trip_add.html'
def __init__(self, **kwargs):
self.form_trip = None
self.trip = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
trip = request.GET['trip_id']
self.trip = Trip.objects.get(pk=trip)
self.form_trip = TripForm(
auto_id='id_trip_%s', instance=self.trip)
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
data = loads(request.body.decode('utf-8'))
data_trip_pk = data['form_pk']
data_form_trip = data['form']
self.trip = Trip.objects.get(pk=data_trip_pk)
self.form_trip = TripForm(
data_form_trip, auto_id='id_trip_%s', instance=self.trip)
if self.form_trip.is_valid():
self.form_trip.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.error(request, core_constants.MESSAGE_TAGS['error'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_trip'] = self.form_trip
context['form_pk'] = self.trip.id
context['btn_edit'] = core_constants.BTN_EDIT
return context
class TripListView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/container/trip_list.html'
def __init__(self, **kwargs):
self.trip_all = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
self.trip_all = Trip.objects.all()
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['list_trips'] = self.trip_all
return context
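# Illustrative only -- a hypothetical urls.py sketch showing how these
# class-based views might be wired (paths and names are assumptions,
# not original code):
#
#   from django.conf.urls import url
#   from . import views
#
#   urlpatterns = [
#       url(r'^container/$', views.ContainerView.as_view()),
#       url(r'^container/edit/$', views.ContainerEditView.as_view()),
#       url(r'^container/list/$', views.ContainerListView.as_view()),
#       url(r'^trip/$', views.TripView.as_view()),
#       url(r'^trip/edit/$', views.TripEditView.as_view()),
#       url(r'^trip/list/$', views.TripListView.as_view()),
#   ]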
```
#### File: apps/customer/views.py
```python
from json import loads
from urllib import parse as urlparse
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, login, logout
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import AuthenticationForm
from django.shortcuts import redirect
from django.shortcuts import resolve_url
from django.utils.decorators import method_decorator
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.generic.base import RedirectView
from django.views.generic.edit import FormView
from apps.customer.forms import (
UserProfileForm, UserCreationAdminForm)
from apps.customer.models import UserProfile
from core import constants as core_constants
from core.mixins import TemplateLoginRequiredMixin
from core.utils import uploads
from core.middleware.thread_user import CuserMiddleware
from .forms import (
UserProfileImageForm, UserProfilePasswordForm)
User = get_user_model()
class ProfileView(TemplateLoginRequiredMixin):
template_name = 'core/profile/index.html'
def __init__(self, **kwargs):
self.profile_group = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
profile = UserProfile.current.values_list(
'user__groups__group__name').filter(user__email=request.user.email)
self.profile_group = UserProfile.current.filter(
user__groups__group__name__in=profile).distinct().prefetch_related('user')
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['profile_group'] = self.profile_group
return context
class ProfileSettingsView(TemplateLoginRequiredMixin):
template_name = 'core/profile/settings.html'
def __init__(self, **kwargs):
self.form_profile = None
self.current_image = None
self.form_profile_image = None
self.form_profile_password = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
profile = UserProfile.current.get(user__email=request.user.email)
user = User.objects.get(pk=profile.user.id)
self.form_profile = UserProfileForm(
auto_id='id_profile_%s', instance=profile)
self.form_profile_image = UserProfileImageForm(
auto_id='id_profile_image_%s', instance=profile)
self.form_profile_password = UserProfilePasswordForm(
auto_id='id_profile_password_%s', instance=user)
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
current_form_send = request.POST.get('current_form_send')
profile = UserProfile.current.get(user__email=request.user.email)
user = User.objects.get(pk=profile.user.id)
self.form_profile = UserProfileForm(
auto_id='id_profile_%s', instance=profile)
self.form_profile_image = UserProfileImageForm(
auto_id='id_profile_image_%s', instance=profile)
self.form_profile_password = UserProfilePasswordForm(
auto_id='id_profile_password_%s', instance=user)
if current_form_send == 'form_profile':
self.form_profile = UserProfileForm(
request.POST, auto_id='id_profile_%s', instance=profile)
if self.form_profile.is_valid():
self.form_profile.save()
if current_form_send == 'form_upload':
self.current_image = profile.profile_image
self.form_profile_image = UserProfileImageForm(
request.POST, request.FILES, auto_id='id_profile_image_%s',
instance=profile)
if self.form_profile_image.is_valid():
form_profile_image = self.form_profile_image.save(
current_image=self.current_image, commit=False)
if request.FILES:
uploads.handle_upload_profile(
name_image=form_profile_image.profile_image,
resize_height=100)
if current_form_send == 'form_password':
self.form_profile_password = UserProfilePasswordForm(
request.POST, auto_id='id_profile_password_%s', instance=user)
if self.form_profile_password.is_valid():
                self.form_profile_password.save(commit=False)
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_profile'] = self.form_profile
context['form_profile_upload'] = self.form_profile_image
context['form_profile_password'] = self.form_profile_password
return context
class LoginView(FormView):
form_class = AuthenticationForm
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'core/registration/login.html'
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, *args, **kwargs):
return super(LoginView, self).dispatch(*args, **kwargs)
def form_valid(self, form):
self.check_and_delete_test_cookie()
login(self.request, form.get_user())
return super(LoginView, self).form_valid(form)
def form_invalid(self, form):
self.set_test_cookie()
return super(LoginView, self).form_invalid(form)
def get_success_url(self):
if self.success_url:
redirect_to = self.success_url
else:
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, ''))
netloc = urlparse.urlparse(redirect_to)[1]
if not redirect_to:
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
elif netloc and netloc != self.request.get_host():
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return redirect_to
def set_test_cookie(self):
self.request.session.set_test_cookie()
def check_and_delete_test_cookie(self):
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
return True
return False
def get(self, request, *args, **kwargs):
self.set_test_cookie()
if self.request.user.is_authenticated():
return redirect(self.success_url)
return super(LoginView, self).get(request, *args, **kwargs)
class LogoutView(RedirectView):
url = '/'
permanent = False
def get(self, request, *args, **kwargs):
logout(request)
CuserMiddleware.del_user()
return super().get(request, *args, **kwargs)
class UserView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/users/user.html'
def __init__(self, **kwargs):
self.form_profile = None
self.form_user = None
self.user_all = None
super().__init__(**kwargs)
def dispatch(self, request, *args, **kwargs):
self.user_all = User.objects.all()
return super().dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
self.form_profile = UserProfileForm(auto_id='id_profile_%s')
self.form_user = UserCreationAdminForm(auto_id='id_user_%s')
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
self.form_user = UserCreationAdminForm(
data=request.POST, auto_id='id_user_%s')
self.form_profile = UserProfileForm(
data=request.POST, auto_id='id_profile_%s')
if self.form_user.is_valid():
user = self.form_user.save(new=True, commit=False)
profile = UserProfile.objects.get(user=user)
self.form_profile = UserProfileForm(
request.POST, auto_id='id_profile_%s', instance=profile)
if self.form_profile.is_valid():
self.form_profile.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.warning(request, core_constants.MESSAGE_TAGS['warning'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_profile'] = self.form_profile
context['form_user'] = self.form_user
context['list_users'] = self.user_all
return context
class UserEditView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/users/user_add.html'
def __init__(self, **kwargs):
self.form_profile = None
self.user = None
self.form_user = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
user_email = request.GET['user_email']
self.user = User.objects.get(email=user_email)
self.form_user = UserCreationAdminForm(
auto_id='id_user_%s', instance=self.user)
self.form_profile = UserProfileForm(
auto_id='id_profile_%s', instance=self.user)
return super().render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
data = loads(request.body.decode('utf-8'))
data_user_pk = data['form_pk']
data_form_user = data['form']
self.user = User.objects.get(pk=data_user_pk)
self.form_user = UserCreationAdminForm(
data_form_user, auto_id='id_user_%s', instance=self.user)
self.form_profile = UserProfileForm(
data_form_user, auto_id='id_profile_%s', instance=self.user)
if self.form_user.is_valid():
self.form_user.save(new=False, commit=False)
if self.form_profile.is_valid():
self.form_profile.save()
messages.success(request, core_constants.MESSAGE_TAGS['success'])
else:
messages.error(request, core_constants.MESSAGE_TAGS['error'])
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['form_profile'] = self.form_profile
context['form_user'] = self.form_user
context['form_pk'] = self.user.id
context['btn_edit'] = core_constants.BTN_EDIT
return context
class UserListView(TemplateLoginRequiredMixin):
template_name = 'administrator/maintenance/users/user_list.html'
def __init__(self, **kwargs):
self.user_all = None
super().__init__(**kwargs)
def get(self, request, *args, **kwargs):
self.user_all = User.objects.all()
return super().render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['list_users'] = self.user_all
return context
```
#### File: apps/menu/forms.py
```python
from django import forms
from django.contrib.auth.models import Group
from core.constants import NAME_SELECT_DEFAULT, STATUS_MODEL1, SELECT_DEFAULT
from .models import (
GroupState, GroupModule, GroupSubModule, SubModule)
class GroupForm(forms.ModelForm):
current_status = forms.ChoiceField(
widget=forms.Select(), choices=SELECT_DEFAULT + STATUS_MODEL1,
required=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['name'].widget.attrs.update(
{'class': 'form-control'})
self.fields['current_status'].widget.attrs.update(
{'class': 'form-control'})
if self.instance.id:
group_state = GroupState.current.get(group=self.instance.id)
self.fields['name'].initial = self.instance.name
self.fields['current_status'].initial = group_state.current_status
class Meta:
model = Group
fields = ['name']
def save(self, *arg, **kwargs):
current_status = self.cleaned_data.get('current_status')
group = super().save(*arg, **kwargs)
group_state = GroupState.objects.filter(group=group)
if group_state.exists():
group_state = GroupState.objects.get(group=group)
group_state.current_status = current_status
group_state.save()
else:
GroupState.objects.create(group=group, current_status=current_status)
group.save()
return group
class GroupModuleForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance')
self.fields['group'].empty_label = NAME_SELECT_DEFAULT
self.fields['module'].empty_label = NAME_SELECT_DEFAULT
if instance:
self.fields['group'].widget.attrs.update({'disabled': True})
self.fields['module'].widget.attrs.update({'disabled': True})
class Meta:
model = GroupModule
fields = ['group', 'module']
widgets = {
"group": forms.Select(attrs={'class': 'form-control'}),
"module": forms.Select(attrs={'class': 'form-control'}),
}
def save(self, *arg, **kwargs):
group_module = super().save(*arg, **kwargs)
return group_module
class GroupModuleSubmoduleForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
instance = kwargs.get('instance')
initial = kwargs.get('initial')
if instance:
module = self.instance.group_module.module
query = SubModule.current.filter(module=module)
self.fields['submodule'] = forms.ModelChoiceField(queryset=query)
if initial:
module = initial['module']
query = SubModule.current.filter(module=module)
self.fields['submodule'] = forms.ModelChoiceField(queryset=query)
self.fields['submodule'].required = True
self.fields['submodule'].widget.attrs.update({'class': 'form-control'})
self.fields['submodule'].empty_label = NAME_SELECT_DEFAULT
def clean(self):
cleaned_data = super().clean()
delete_valid = cleaned_data['DELETE']
id_submodule = cleaned_data['id']
if delete_valid:
GroupSubModule.current.get(pk=id_submodule.id).delete(force=True)
return cleaned_data
class Meta:
model = GroupSubModule
fields = ['group_module', 'submodule']
def save(self, *args, **kwargs):
group_submodule = super().save(*args, **kwargs)
return group_submodule
```
#### File: heroku/core/mixins.py
```python
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.cache import cache
from django.shortcuts import redirect
from django.views.generic import TemplateView, ListView
from apps.customer.models import UserProfile
class TemplateLoginRequiredMixin(LoginRequiredMixin, TemplateView):
login_url = '/'
def __init__(self, **kwargs):
self.modules = None
self.current_user = None
super().__init__(**kwargs)
def dispatch(self, request, *args, **kwargs):
if not request.user.is_authenticated():
return redirect('/?next=%s' % request.path)
if not cache.get('modules'):
self.modules = request.user.get_menu()
cache.set('modules', self.modules, 30 * 60)
else:
self.modules = cache.get('modules')
self.current_user = UserProfile.objects.get(
user__email=request.user.email)
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['modules'] = self.modules
context['request_path'] = self.request.get_full_path()
context['current_user'] = self.current_user
return context
class ListViewRequiredMixin(LoginRequiredMixin, ListView):
pass
```
#### File: core/utils/fields.py
```python
import uuid
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from core.constants import SELECT_DEFAULT, STATUS_MODEL1, STATUS_MODEL2
from core.queryset import AuditableManager
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
class TimeStampedModel(models.Model):
date_created = models.DateTimeField(
blank=True, null=True, editable=False, auto_now_add=True,
verbose_name=_('date created'))
date_modified = models.DateTimeField(
blank=True, null=True, editable=False, auto_now=True,
verbose_name=_('date modified'))
# def save(self, *args, **kwargs):
# if self.pk:
# self.date_modified = datetime()
# else:
# self.date_created = datetime()
# kwargs['force_insert'] = False
# super(TimeStampedModel, self).save(*args, **kwargs)
#
# def delete(self, force=False, *args, **kwargs):
# self.is_deleted = True
# self.save()
# if force:
# super(TimeStampedModel, self).delete(*args, **kwargs)
class Meta:
abstract = True
class UUIDModel(models.Model):
id = models.UUIDField(primary_key=True, editable=False, default=uuid.uuid4)
class Meta:
abstract = True
class StatusModel(models.Model):
is_deleted = models.BooleanField(default=False, editable=False)
class Meta:
abstract = True
class StatusCurrent(models.Model):
current_status = models.CharField(
max_length=10, choices=SELECT_DEFAULT + STATUS_MODEL1)
class Meta:
abstract = True
class StatusBillingBModel(models.Model):
billing_status = models.CharField(
max_length=10, choices=SELECT_DEFAULT + STATUS_MODEL2)
class Meta:
abstract = True
class ModuleModel(models.Model):
text = models.CharField(max_length=20, null=True, blank=True)
style = models.CharField(max_length=20, null=True, blank=True)
match = models.CharField(default="#", max_length=100, null=False,
blank=False)
class Meta:
abstract = True
def __str__(self):
return u'{0}'.format(self.text)
class BaseModel(UUIDModel, TimeStampedModel, StatusModel):
current = AuditableManager()
objects = models.Manager()
class Meta:
abstract = True
class BaseModel2(UUIDModel, TimeStampedModel):
current = AuditableManager()
objects = models.Manager()
class Meta:
abstract = True
class BaseModel3(UUIDModel, TimeStampedModel, StatusModel, StatusBillingBModel):
current = AuditableManager()
objects = models.Manager()
class Meta:
abstract = True
class BaseModel4(UUIDModel, TimeStampedModel, StatusModel, ModuleModel):
current = AuditableManager()
objects = models.Manager()
class Meta:
abstract = True
class BaseModel5(UUIDModel, TimeStampedModel, StatusCurrent):
current = AuditableManager()
objects = models.Manager()
class Meta:
abstract = True
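# Illustrative only -- a hypothetical concrete model showing how the abstract
# bases above are meant to be combined (the model name and field are not part
# of the original code):
#
#   class Vessel(BaseModel):
#       name = models.CharField(max_length=50)
#
# Vessel would then carry a UUID primary key, created/modified timestamps,
# an is_deleted flag, and both the filtered 'current' and plain 'objects'
# managers.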
```
#### File: core/utils/resize_image.py
```python
from __future__ import division
import math
import sys
from functools import wraps
from PIL import Image
class ImageSizeError(Exception):
def __init__(self, actual_size, required_size):
self.message = 'Image is too small, Image size : %s, Required size : %s' % (
actual_size, required_size)
self.actual_size = actual_size
self.required_size = required_size
def __str__(self):
return repr(self.message)
def validate(validator):
def decorator(func):
"""Bound decorator to a particular validator function"""
@wraps(func)
def wrapper(image, size, validate=True):
if validate:
validator(image, size)
return func(image, size)
return wrapper
return decorator
def _is_big_enough(image, size):
"""Check that the image's size superior to `size`"""
if (size[0] > image.size[0]) and (size[1] > image.size[1]):
raise ImageSizeError(image.size, size)
def _width_is_big_enough(image, width):
"""Check that the image width is superior to `width`"""
if width >= image.size[0]:
raise ImageSizeError(image.size[0], width)
def _height_is_big_enough(image, height):
"""Check that the image height is superior to `height`"""
if height >= image.size[1]:
raise ImageSizeError(image.size[1], height)
@validate(_is_big_enough)
def resize_crop(image, size):
img_format = image.format
image = image.copy()
old_size = image.size
left = (old_size[0] - size[0]) / 2
top = (old_size[1] - size[1]) / 2
right = old_size[0] - left
bottom = old_size[1] - top
rect = [int(math.ceil(x)) for x in (left, top, right, bottom)]
left, top, right, bottom = rect
crop = image.crop((left, top, right, bottom))
crop.format = img_format
return crop
@validate(_is_big_enough)
def resize_cover(image, size):
img_format = image.format
img = image.copy()
img_size = img.size
ratio = max(size[0] / img_size[0], size[1] / img_size[1])
new_size = [
int(math.ceil(img_size[0] * ratio)),
int(math.ceil(img_size[1] * ratio))
]
img = img.resize((new_size[0], new_size[1]), Image.LANCZOS)
img = resize_crop(img, size)
img.format = img_format
return img
def resize_contain(image, size):
img_format = image.format
img = image.copy()
img.thumbnail((size[0], size[1]), Image.LANCZOS)
background = Image.new('RGBA', (size[0], size[1]), (255, 255, 255, 0))
img_position = (
int(math.ceil((size[0] - img.size[0]) / 2)),
int(math.ceil((size[1] - img.size[1]) / 2))
)
background.paste(img, img_position)
background.format = img_format
return background
@validate(_width_is_big_enough)
def resize_width(image, size):
    try:
        width = size[0]
    except (TypeError, IndexError):
        width = size
img_format = image.format
img = image.copy()
img_size = img.size
new_height = int(math.ceil((width / img_size[0]) * img_size[1]))
img.thumbnail((width, new_height), Image.LANCZOS)
img.format = img_format
return img
@validate(_height_is_big_enough)
def resize_height(image, size):
    try:
        height = size[1]
    except (TypeError, IndexError):
        height = size
img_format = image.format
img = image.copy()
img_size = img.size
new_width = int(math.ceil((height / img_size[1]) * img_size[0]))
img.thumbnail((new_width, height), Image.LANCZOS)
img.format = img_format
return img
def resize_thumbnail(image, size):
img_format = image.format
img = image.copy()
img.thumbnail((size[0], size[1]), Image.LANCZOS)
img.format = img_format
return img
def resize(method, image, size):
list_images = ['crop', 'cover', 'contain', 'width', 'height', 'thumbnail']
if method not in list_images:
raise ValueError(u"method argument should be one of "
u"'crop', 'cover', 'contain', 'width', "
u"'height' or 'thumbnail'")
return getattr(sys.modules[__name__], 'resize_%s' % method)(image, size)
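# Minimal usage sketch (illustrative; 'in.jpg' and 'out.jpg' are
# hypothetical paths):
#
#   img = Image.open('in.jpg')
#   thumb = resize('thumbnail', img, [200, 100])
#   thumb.save('out.jpg', thumb.format)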
```
#### File: core/utils/uploads.py
```python
import os
import sys
from PIL import Image
from django.conf import settings
from django.core.files.storage import default_storage as storage
from core.utils import resize_image
prefix_profile = 'uploads/profiles/'
prefix_container = 'uploads/container/'
prefix_upload_company = 'upload/logo_company'
def upload_location_profile(instance, filename):
file_base, extension = filename.split(".")
path_file = u"{0:s}/{1:s}.{2:s}".format(
str(instance.user.id), str(instance.id), extension)
return os.path.join(prefix_profile, path_file)
def upload_location_trip(instance, filename):
file_base, extension = filename.split(".")
path_file = u"{0:s}/shellcatch_{1:s}_{2:s}-{3:s}.{4:s}".format(
str(instance.container.identifier_mac),
str(instance.container.identifier_mac),
str(instance.datetime_image.strftime("%Y_%m_%d")),
str(instance.datetime_image.strftime("%H-%M-%S")),
extension).lower()
return os.path.join(prefix_container, path_file)
def upload_location_company(instance, filename):
file_base, extension = filename.split(".")
return "{0}/{1}.{2}".format(
prefix_upload_company, instance.name, extension)
def handle_upload_remove(current_image):
if settings.DEBUG:
if current_image:
image_path = "{0}/{1}".format(str(settings.MEDIA_ROOT),
str(current_image))
if os.path.isfile(image_path):
os.remove(image_path)
else:
pass
def handle_upload_profile(name_image, resize_height=100):
if settings.DEBUG:
url = "{0}/{1}".format(str(settings.MEDIA_ROOT).replace('\\', '/'),
str(name_image))
image = Image.open(url)
filename_base, filename_ext = os.path.splitext(url)
filename = url.rsplit('/', 1)[1].rsplit('.', 1)[0]
fullpath = url.rsplit('/', 1)[0]
if filename_ext not in ['.jpg', '.jpeg', '.png']:
sys.exit()
image = resize_image.resize_height(image, resize_height)
new_resize_image = filename + "_" + str(resize_height) + filename_ext
image.save(fullpath + '/' + new_resize_image)
else:
file_path = name_image.name
filename_base, filename_ext = os.path.splitext(file_path)
thumb_file_path = filename_base + "_" + str(resize_height) + filename_ext
f = storage.open(file_path, 'r')
image = Image.open(f)
if filename_ext not in ['.jpg', '.jpeg', '.png']:
sys.exit()
image = resize_image.resize_height(image, resize_height)
f_thumb = storage.open(thumb_file_path, "w")
image.save(f_thumb, "jpeg")
f_thumb.close()
``` |
{
"source": "jonarani/IAS0360_project",
"score": 3
} |
#### File: IAS0360_project/source/k_means.py
```python
import matplotlib.pyplot as plt
import numpy as np
import cv2
import os
import shutil
# Count unique values in matrix
def count_unique(matrix, position):
counter = 0
np_unique = np.unique(matrix)
if len(np_unique) > (position - 1):
for i in range(len(matrix)):
for j in range(len(matrix[i])):
if matrix[i][j] == np_unique[position - 1]:
counter += 1
return counter
# Get k-means image in gray scale
def get_k_means(img, K, attempts):
vectorized = img.reshape((-1,3))
vectorized = np.float32(vectorized)
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
ret,label,center = cv2.kmeans(vectorized, K, None, criteria, attempts, cv2.KMEANS_PP_CENTERS)
center = np.uint8(center)
res = center[label.flatten()]
result_image = res.reshape((img.shape))
# Convert to gray scale for easier warm object detection (less unique values)
return cv2.cvtColor(result_image, cv2.COLOR_BGR2GRAY)
# Analyze image using k-means clustering
def analyze_image(fpath, K, attempts):
img = cv2.imread(fpath, cv2.COLOR_BGR2RGB)
gray = get_k_means(img, K, attempts)
    picture_size = len(gray) * len(gray[0])
    # Area percentage of each of the four clusters (position 3 ~ warm object)
    percentages = [float(count_unique(gray, pos) / picture_size) * 100
                   for pos in range(1, 5)]
    percentage_string = ",".join("{:.1f}".format(p) for p in percentages)
    print(str(fpath) + ": " + str(np.unique(gray)) + ": " + percentage_string)
figure_size = 15
plt.figure(figsize=(figure_size,figure_size))
plt.subplot(1,2,1),plt.imshow(img)
plt.title('Original image'), plt.xticks([]), plt.yticks([])
plt.subplot(1,2,2),plt.imshow(gray)
plt.title('Clustered image when K = %i' % K), plt.xticks([]), plt.yticks([])
plt.show()
# Analyze folder of images using k-means clustering
def analyze_folder(dpath, K, attempts):
counter = 0
k_file = open("kmeans.txt", "w")
for fname in os.listdir(dpath):
fpath = os.path.join(dpath, fname)
counter += 1
img = cv2.imread(fpath, cv2.COLOR_BGR2RGB)
gray = get_k_means(img, K, attempts)
        picture_size = len(gray) * len(gray[0])
        # Area percentage of each of the four clusters (position 3 ~ warm object)
        percentages = [float(count_unique(gray, pos) / picture_size) * 100
                       for pos in range(1, 5)]
        percentage_string = ",".join("{:.1f}".format(p) for p in percentages)
        print(str(fpath) + ": " + str(np.unique(gray)) + ": " + percentage_string)
        k_file.write(str(fpath) + "," + str(np.unique(gray)) + "," + percentage_string + '\n')
k_file.close()
# Move images to correct folder based on output of k-mean analyze_folder
def image_classification(kfile):
    with open(kfile) as f:
        # Read all lines once instead of opening the file a second time
        lines = f.readlines()
        for line in lines:
            splitted_line = line.split(',')
warm_object = float(splitted_line[4])
if (warm_object < 1):
print(splitted_line[0] + ": " + splitted_line[4])
if warm_object > 0.5 and warm_object < 10:
shutil.copy(splitted_line[0], os.path.join('imgs_human', splitted_line[0].split('\\')[1]))
else:
shutil.copy(splitted_line[0], os.path.join('imgs_background', splitted_line[0].split('\\')[1]))
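# For illustration (values are hypothetical), each kmeans.txt line looks like
#   ../dataset/images_from_matlab_script\fig_x.png,[ 60  85 120 200],55.0,30.1,4.7,10.2
# i.e. index 0 is the path, index 1 the unique cluster values, and indexes
# 2-5 the four area percentages, so splitted_line[4] is the warm-object share.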
def main():
#analyze_image(os.path.join("../dataset/images_from_matlab_script", "fig_20210628_1630_3078_0376_10_22.png"), 4,10)
analyze_folder("../dataset/images_from_matlab_script", 4,10)
#image_classification("kmeans.txt")
if __name__ == "__main__":
main()
```
#### File: IAS0360_project/source/preprocess_and_train.py
```python
import json
from PIL.Image import SEQUENCE
import matplotlib
import matplotlib.pyplot as plt
from numpy.random.mtrand import shuffle
import cv2
import numpy as np
import scipy.ndimage as scpy
from tensorflow.keras.layers import Conv2D, MaxPool2D, Dropout, BatchNormalization, Flatten
from tensorflow.keras.callbacks import EarlyStopping
import tensorflow as tf
import sys
import random
import os
# when printing numpy array then all will be printed
#np.set_printoptions(threshold=sys.maxsize)
# 32 x 32
IMG_HEIGHT = 32
IMG_WIDTH = 32
# Due to sensor placement it seems that first rows are always
# cold and unable to detect humans
DEL_ROW_AMNT = 8
# IMG_Y_RESIZED = int((IMG_HEIGHT - DEL_ROW_AMNT) * 0.75)
# IMG_X_RESIZED = int(IMG_WIDTH * 2.0 * 0.75)
IMG_Y_RESIZED = IMG_HEIGHT - DEL_ROW_AMNT
IMG_X_RESIZED = IMG_WIDTH
# Sensor 3078
S3078_FILE = '../dataset/thermal_raw_20210507_full/20210507_1605_3078.txt'
# Sensor C088
SC088_FILE = '../dataset/thermal_raw_20210507_full/20210507_1605_C088.txt'
s3078_data_arr = []
sc088_data_arr = []
human_images = []
background_images = []
x_train = []
y_train = []
x_test = []
y_test = []
def readSensorData():
s3078_file = open(S3078_FILE, 'r')
sc088_file = open(SC088_FILE, 'r')
counter = 0
while True:
counter = counter + 1
# Get one sample from the file
s3078_sample = s3078_file.readline()
sc088_sample = sc088_file.readline()
# eof
if (not s3078_sample or not sc088_sample):
break
if (counter % 4 == 0):
# Convert sample into json form so it would be easier to parse
s3078_json = json.loads(s3078_sample)
sc088_json = json.loads(sc088_sample)
# Get the data part from the sample
s3078_data = np.array(s3078_json["data"])
sc088_data = np.array(sc088_json["data"])
s3078_data = np.delete(s3078_data, np.s_[0:DEL_ROW_AMNT], 0)
sc088_data = np.delete(sc088_data, np.s_[0:DEL_ROW_AMNT], 0)
s3078_data_arr.append(s3078_data)
sc088_data_arr.append(sc088_data)
# close sensor txt file
s3078_file.close()
sc088_file.close()
def removeHotPixels(img):
    # Replace every pixel warmer than the frame mean with a value slightly
    # below the mean plus small random noise. This erases warm objects
    # (e.g. humans) so the frame can be used as a background sample.
    image = np.copy(img)
    mean_temp = np.mean(image)
    for i, row in enumerate(image):
        for j, col in enumerate(row):
            if (image[i][j] > mean_temp):
                rand_float = (np.random.random() / 2) - 0.25
                image[i][j] = mean_temp - 0.5 + rand_float
    return image
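# Quick sanity sketch (illustrative): removing the hot pixels should pull the
# frame maximum down to at most the original mean, e.g.
#   bg = removeHotPixels(sample)
#   assert bg.max() <= sample.max()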
def dataAugmentation():
    # Each raw frame yields four human samples (original, median filtered,
    # horizontally flipped, flipped + filtered) and four background samples
    # derived from a copy with the warm pixels removed. Both sensor streams
    # get identical treatment, so a single loop covers them.
    for sample in s3078_data_arr + sc088_data_arr:
        # Human images
        human_images.append(sample)
        sample_cpy = np.copy(sample)
        sample_cpy = scpy.median_filter(sample_cpy, size=(3, 3))
        human_images.append(sample_cpy)
        sample_cpy = np.copy(sample)
        sample_cpy = np.flip(sample_cpy, 1)
        human_images.append(sample_cpy)
        sample_cpy = scpy.median_filter(sample_cpy, size=(3, 3))
        human_images.append(sample_cpy)
        # Background images
        sample_no_hot_pixels = removeHotPixels(sample)
        background_images.append(sample_no_hot_pixels)
        background_images.append(
            scpy.median_filter(sample_no_hot_pixels, size=(3, 3)))
        # Shuffle a copy so the background sample appended above is not
        # mutated in place (np.random.shuffle works in place)
        shuffled = np.copy(sample_no_hot_pixels)
        np.random.shuffle(shuffled)
        background_images.append(shuffled)
        background_images.append(scpy.median_filter(shuffled, size=(3, 3)))
def storeImages():
for i, img in enumerate(human_images):
# Multiplied by 10 in order not to lose precision
# For example 13.4 will be 134 rather than 13
img = img * 10
cv2.imwrite("./imgs_human/img{}.png".format(i), img)
# Resize images to be smaller
#img = cv2.imread("imgs_human/img{}.png".format(i))
#res = cv2.resize(img, (IMG_X_RESIZED, IMG_Y_RESIZED), interpolation = cv2.INTER_CUBIC)
#cv2.imwrite("imgs_human_resized/img{}.png".format(i), img)
for i, img in enumerate(background_images):
# Multiplied by 10 in order not to lose precision
# For example 13.4 will be 134 rather than 13
img = img * 10
cv2.imwrite("./imgs_background/img{}.png".format(i), img)
# Resize images to be smaller
#img = cv2.imread("imgs_background/img{}.png".format(i))
#res = cv2.resize(img, (IMG_X_RESIZED, IMG_Y_RESIZED), interpolation = cv2.INTER_CUBIC)
#cv2.imwrite("imgs_background_resized/img{}.png".format(i), img)
def prepareDataForTraining():
global x_train
global y_train
global x_test
global y_test
training_data_prct = 0.8
img_label_tuple = []
for idx, im in enumerate(os.listdir("imgs_human/")):
try:
img_array = cv2.imread(os.path.join("imgs_human/", im))
# Remove third dimension and divide by 10 to get original temp array
img_array = np.array(img_array[:, :, 0]) / 10
img_label_tuple.append((img_array, 1))
except Exception as e:
print("EXCEPTION")
pass
for idx, im in enumerate(os.listdir("imgs_background/")):
try:
img_array = cv2.imread(os.path.join("imgs_background/", im))
# Remove third dimension and divide by 10 to get original temp array
img_array = np.array(img_array[:, :, 0]) / 10
img_label_tuple.append((img_array, 0))
except Exception as e:
print("EXCEPTION")
pass
random.shuffle(img_label_tuple)
imgs, labels = zip(*img_label_tuple)
training_amount = int((len(imgs) * training_data_prct))
validation_amount = len(imgs) - training_amount
x_train = np.array(imgs[:training_amount])
y_train = np.array(labels[:training_amount])
x_test = np.array(imgs[(-validation_amount):])
y_test = np.array(labels[(-validation_amount):])
# Normalize everything
# x_train = tf.keras.utils.normalize(x_train)
# x_test = tf.keras.utils.normalize(x_test)
# TODO: something more reasonable perhaps
x_train = x_train / 255
x_test = x_test / 255
x_train = np.array(x_train).reshape((-1, IMG_Y_RESIZED, IMG_X_RESIZED, 1))
x_test = np.array(x_test).reshape((-1, IMG_Y_RESIZED, IMG_X_RESIZED, 1))
# TODO maybe: https://bleedai.com/human-activity-recognition-using-tensorflow-cnn-lstm/
def train():
model = tf.keras.models.Sequential()
model.add(Conv2D(32, kernel_size=(3,3), padding='same', activation='relu', input_shape=(IMG_Y_RESIZED, IMG_X_RESIZED, 1)))
model.add(Conv2D(32, kernel_size=(3,3), padding='same', activation='relu'))
model.add(MaxPool2D(pool_size=(2,2), strides=2))
model.add(Dropout(0.5))
model.add(Conv2D(64, kernel_size=(3,3), padding='same', activation='relu'))
model.add(Conv2D(64, kernel_size=(3,3), padding='same', activation='relu'))
model.add(MaxPool2D(pool_size=(2,2), strides=2))
model.add(Dropout(0.5))
model.add(Conv2D(128, kernel_size=(3,3), padding='same', activation='relu'))
#model.add(Conv2D(256, kernel_size=(3,3), padding='same', activation='relu'))
model.add(MaxPool2D(pool_size=(2,2), strides=2))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(tf.keras.layers.Dense(2))
model.summary()
# Define parameters for training the model
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-4),
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), # BinaryCrossentropy
metrics=['accuracy'])
# Train model - Adjust model parameters to minimize the loss and train it
model.fit(x_train, y_train, epochs=2, batch_size=32)
# Evaluate model performance
val_loss, val_acc = model.evaluate(x_test, y_test)
print ("Validation evaluation results: loss - ", format(val_loss, '.3f'), "accuracy - ", format(val_acc, '.3f'))
model.save('models/my_mnist.model')
return model
def convertToTfLite(model):
# https://www.tensorflow.org/lite/convert/index
converter = tf.lite.TFLiteConverter.from_keras_model(model)
converter.optimizations = [tf.lite.Optimize.DEFAULT]
tflite_model = converter.convert()
with open('models/model.tflite', 'wb') as f:
f.write(tflite_model)
def runSomeInferenceTests(model):
# TODO: run it on some unseen data
predictions = model.predict(x_train[:10])
print (y_train[:10])
print (predictions)
def main():
readSensorData()
dataAugmentation()
storeImages()
prepareDataForTraining()
model = train()
convertToTfLite(model)
runSomeInferenceTests(model)
if __name__ == "__main__":
main()
# Write image to .txt file as C array
# with open('background.txt', 'w') as f:
# counter = 0
# for item in background_images:
# for i in item:
# f.write("{")
# for j in i:
# f.write("%.4s, " % j)
# f.write("},\n")
# f.write("\n")
# counter = counter +1
# print (item)
# if (counter >= 5):
# break
```
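The script exports a quantized TFLite model but never exercises it. A hedged sketch of running one sample through the exported file with the standard `tf.lite.Interpreter` API (the input shape is assumed to match the `(IMG_Y_RESIZED, IMG_X_RESIZED, 1)` tensors used above):

```python
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path='models/model.tflite')
interpreter.allocate_tensors()
inp = interpreter.get_input_details()[0]
out = interpreter.get_output_details()[0]

# Placeholder input; a real test would load an unseen thermal frame.
sample = np.zeros(inp['shape'], dtype=np.float32)
interpreter.set_tensor(inp['index'], sample)
interpreter.invoke()
logits = interpreter.get_tensor(out['index'])
print('predicted class:', int(np.argmax(logits)))  # 0 = background, 1 = human
```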
{
"source": "jonarani/random_walk",
"score": 3
} |
#### File: jonarani/random_walk/compressed_sensing2.py
```python
from calendar import c
from datetime import date
from multiprocessing.sharedctypes import Value
import numpy as np
import matplotlib.pyplot as plt
import csv
import scipy.ndimage
import datetime
import glob
from matplotlib.pyplot import figure
from scipy.interpolate import interp1d
import pandas as pd
DATA_DIR = 'tammsaare_street/*.csv'
first = '70B3D5E39000206E-data-2022-03-19 16 35 25.csv'
third = '70B3D5E39000237C-data-2022-03-19 16 33 03.csv'
tenth = '70B3D5E390002380-data-2022-03-19 16 34 35.csv'
DATES = {}
DATA = {}
def remove_duplicates(data, dates):
res = [idx for idx, val in enumerate(dates) if val in dates[:idx]]
data = np.delete(data, res)
dates = np.delete(dates, res)
return data, dates
def get_datetime_diff(d1, d2):
return ((d2 - d1).total_seconds() / 60)
def interpolate(data, dates):
new_data = np.zeros(0)
ts_range = pd.period_range(start=dates[0], end=dates[len(dates)-1], freq='T')
new_dates = ts_range.to_timestamp()
new_data = np.full(len(ts_range), np.nan)
# TODO: optimize
for i, new_date in enumerate(new_dates):
for j, old_date in enumerate(dates):
if (new_date == old_date):
new_data[i] = data[j]
break
df = pd.DataFrame({'Date': new_dates, 'Value': new_data})
res = df.set_index('Date')['Value'].interpolate(method="linear")
return np.asarray(res.values, dtype=int), np.asarray(new_dates)
# Read data from CSV files to dictionaries
for i, file in enumerate(glob.glob(DATA_DIR)):
with open(file, newline='') as csv_file:
csv_reader = csv.DictReader(csv_file)
decibels = np.zeros(0)
dates = []
for j, row in enumerate(csv_reader):
            # The exported CSV begins with a BOM and a "sep=" directive, so
            # the timestamp column ends up under this odd header key.
            date_str = row["\ufeffsep="]
if (date_str != "Time"):
date = datetime.datetime(int(date_str[0:4]), int(date_str[5:7]), int(date_str[8:10]), int(date_str[11:13]), int(date_str[14:16]))
dates = np.append(dates, date)
val = row['']
if (val != "dt_sound_level_dB"):
val = int(val)
decibels = np.append(decibels, val)
# cut 'tammsaare_street' from the key
key = file[17:]
decibels = np.array(decibels, dtype=int)
DATES[key] = dates
DATA[key] = decibels
# Remove duplicates, interpolate
for key in DATA.keys():
print ("Processing {}...".format(key))
data, dates = remove_duplicates(DATA[key], DATES[key])
data, dates = interpolate(data, dates)
# Take the first 700
DATES[key] = dates[:700]
DATA[key] = data[:700]
# SVD
A = np.asarray((DATA['70B3D5E39000206E-data-2022-03-19 16 35 25.csv'],
DATA['70B3D5E39000235F-data-2022-03-19 16 33 37.csv'],
DATA['70B3D5E39000237C-data-2022-03-19 16 33 03.csv'],
DATA['70B3D5E390002007-data-2022-03-19 16 31 55.csv'],
DATA['70B3D5E390002009-data-2022-03-19 16 28 17.csv'],
DATA['70B3D5E390002021-data-2022-03-19 16 29 05.csv'],
DATA['70B3D5E390002043-data-2022-03-19 16 30 39.csv'],
DATA['70B3D5E390002047-data-2022-03-19 16 31 13.csv'],
DATA['70B3D5E390002093-data-2022-03-19 16 30 01.csv'],
DATA['70B3D5E390002380-data-2022-03-19 16 34 35.csv'],
))
A = np.transpose(A)
#Performing SVD
U, D, VT = np.linalg.svd(A)
# Dimensions of A_remake:
#   1st: rank of the truncated-SVD approximation
#   2nd: time index within the dataset (700 samples)
#   3rd: sensor, where index 0 is dataset 1, index 1 is dataset 3, index 2 is dataset 10
A_remake = np.zeros((10, 700, 3))
realD = np.zeros((700, 10))
k = 10
for i in range(k):
realD[i][i] = D[i]
a_remake_k = U @ realD @ VT
# Recreate 1st, 3rd and 10th dataset
for c, d in zip([0, 2, 9], [0, 1, 2]):
for r in range (700):
A_remake[i][r][d] = a_remake_k[r][c]
# sensors, ranks
RMSE = np.zeros((3, 10))
for j in range(10):  # ranks
    rmse1 = 0
    rmse2 = 0
    rmse3 = 0
    for k in range(700):  # data points
        # RMSE squares each residual before summing; the earlier version
        # summed raw residuals and squared the total, letting positive and
        # negative errors cancel out.
        rmse1 = rmse1 + np.square(DATA[first][k] - A_remake[j][k][0])
        rmse2 = rmse2 + np.square(DATA[third][k] - A_remake[j][k][1])
        rmse3 = rmse3 + np.square(DATA[tenth][k] - A_remake[j][k][2])
    RMSE[0][j] = np.sqrt(rmse1 / 700)
    RMSE[1][j] = np.sqrt(rmse2 / 700)
    RMSE[2][j] = np.sqrt(rmse3 / 700)
print (RMSE)
# dataset 1, 3, 10
fig, ax1 = plt.subplots()
color = 'tab:red'
ax1.set_xlabel('ranks')
ax1.set_ylabel('Singular values', color=color)
ax1.plot(D, 'ro-', label='singular values')
ax1.tick_params(axis='y', labelcolor=color)
ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis
color = 'tab:blue'
ax2.set_ylabel('RMSE', color=color) # we already handled the x-label with ax1
ax2.plot(RMSE[0], 'b.-', label='dataset1 rmse')
ax2.plot(RMSE[1], 'g.-', label='dataset3 rmse')
ax2.plot(RMSE[2], 'k.-', label='dataset10 rmse')
ax2.tick_params(axis='y', labelcolor=color)
plt.legend()
fig.tight_layout() # otherwise the right y-label is slightly clipped
plt.show()
```
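The per-rank reconstruction loop above is the standard truncated-SVD idiom. A compact sketch of the same idea on synthetic data:

```python
import numpy as np

A = np.random.rand(700, 10)
U, D, VT = np.linalg.svd(A, full_matrices=False)

def reconstruct(k):
    # Rank-k approximation: keep only the k largest singular values.
    return U[:, :k] @ np.diag(D[:k]) @ VT[:k, :]

for k in (1, 3, 10):
    err = np.sqrt(np.mean((A - reconstruct(k)) ** 2))
    print('rank', k, 'rmse %.4f' % err)
```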
{
"source": "jonaraphael/skytruth",
"score": 3
} |
#### File: skytruth/ceruleanserver/db_connection.py
```python
import psycopg2
import pandas as pd
from configs import server_config
import json
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker, Query
from contextlib import contextmanager
class DBConnection:
"""A class that knows how to connect to and manage connections to the DB
"""
def __init__(
self,
host=server_config.DB_HOST,
user=server_config.DB_USER,
        password=server_config.DB_PASSWORD,  # assumption: mirrors the other server_config defaults (original value redacted)
database=server_config.DB_DATABASE,
port=server_config.DB_PORT,
dbtype=server_config.DB_TYPE,
):
self.engine = create_engine(
f"{dbtype}://{user}:{password}@{host}:{port}/{database}",
echo=server_config.ECHO_SQL,
)
self.sess = sessionmaker(bind=self.engine)()
@contextmanager
def session_scope():
"""Provide a transactional scope around a series of operations."""
db = DBConnection()
try:
yield db.sess
db.sess.commit()
except:
db.sess.rollback()
raise
finally:
db.sess.close()
def unique_join(self, *props, **kwargs):
""" This is a function added to the query object, that allows programmatic
creation of queries by allowing repeated calling of any class that is
already joined without causing an error.
"""
if props[0] in [c.entity for c in self._join_entities]:
return self
return self.join(*props, **kwargs)
Query.unique_join = unique_join
# XXXHELP For some reason this returns None instead of 0, and 2 instead of 1!!!
# def get_count(q):
# count_q = q.statement.with_only_columns([func.count()]).order_by(None)
# count = q.session.execute(count_q).scalar()
# return count
```
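A usage sketch for the `session_scope()` helper above, assuming it is importable at module level as written; the table name is made up for illustration:

```python
from db_connection import session_scope

# Commit happens on clean exit, rollback on exception, close always.
with session_scope() as sess:
    count = sess.execute("SELECT count(*) FROM grd").scalar()  # table name assumed
    print(count)
```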
#### File: ceruleanserver/ml/inference.py
```python
from pathlib import Path
from fastai2.learner import load_learner
from subprocess import run, PIPE
import sys
import json
from ml.vector_processing import (
unify,
sparsify,
intersection,
shapely_to_geojson,
)
from ml.raster_processing import (
inference_to_poly,
merge_chips,
inference_to_geotiff,
img_chip_generator,
resize,
)
sys.path.append(str(Path(__file__).parent.parent))
from configs import ( # pylint: disable=import-error
server_config,
path_config,
ml_config,
)
from utils.common import clear, create_pg_array_string
def run_inference(infero):
multi_machine(infero)
return infero.geom_path
def multi_machine(infero, out_path=None):
"""Run inference on a GRD using multiple ML PKLs, and combine them to get a single multipolygon
Arguments:
pid {str} -- Sentinel 1 product ID, e.g. 'S1A_IW_GRDH_1SDV_20200406T194140_20200406T194205_032011_03B2AB_C112'
pkls {list of strs} -- list of model pkls to run on indicated product e.g. ["2_18_128_0.676.pkl", "2_18_256_0.691.pkl", "2_18_512_0.705.pkl"]
Keyword Arguments:
thresholds {int or list of ints} -- Thresholds to be applied to each ML PKL respectively (default: {ml_config.ML_THRESHOLDS})
grd_path {Path} -- Location of GRD tiff (default: {grd_path = Path(path_config.LOCAL_DIR) / "temp" / pid / "vv_grd.tiff"})
out_path {Path} -- Location where final GeoJSON should be saved (default: {grd_path.with_name(f"slick_{'-'.join([str(t) for t in thresholds])}conf.geojson")})
fine_pkl_idx {int} -- Which PKL gives the finest resolution result, will be used to trim output (default: {-1})
Returns:
Path -- out_path
"""
working_dir = infero.grd_path.parent
out_path = out_path or infero.geom_path
if not isinstance(infero.thresholds, list):
infero.thresholds = [infero.thresholds] * len(infero.ml_pkls)
# Run machine learning on vv_grd to make 3 contours
geojson_paths = []
for i, pkl in enumerate(infero.ml_pkls):
if server_config.VERBOSE:
print("Running Inference on", pkl)
inference_path = (working_dir / pkl).with_suffix(".tiff")
if not inference_path.exists() and server_config.RUN_ML:
learner = load_learner_from_s3(pkl, False)
machine(learner, infero, out_path=inference_path)
geojson_path = inference_to_poly(inference_path, infero.thresholds[i])
geojson_paths += [geojson_path]
    union = unify(geojson_paths)  # Returns shapely multipolygon
sparse = sparsify(union, geojson_paths)
inter = intersection(geojson_paths[infero.fine_pkl_idx], sparse)
infero.polys = [poly for poly in inter]
shapely_to_geojson(inter, out_path)
return out_path
def machine(learner, infero, out_path=None):
"""Run machine learning on a downloaded image
Arguments:
learner {fastai2 learner} -- A trained and loaded learner
img_path {Path} -- Location of the large image to be processed
Keyword Arguments:
inf_dir {Path} -- Directory where inference chips should be stored (default: {None})
inf_path {Path} -- Location of the stitched-together output (default: {None})
"""
# Prepare some constants
img_path = infero.grd_path
out_path = (
out_path or img_path.parent / "inference.tiff"
) # Where the merged inference mask should be stored
inf_dir = out_path.parent / "inf" # Where the inference chips should be stored
# Cut up the GTiff into many small TIFs
chp_gen = img_chip_generator(
infero.grd_path,
infero.chip_size_orig,
infero.chip_size_reduced,
infero.overhang,
out_dir=inf_dir,
)
for i, chp_path in enumerate(chp_gen):
infer(chp_path, learner) # Run Inference on the current chip
# Merge the masks back into a single image
merge_chips(inf_dir, out_path)
def infer(chip_path, learner):
"""Run inference on a chip
Arguments:
chip_path {Path} -- Location of a single chip
learner {fastai2 model} -- Loaded fastai2 pkl file
"""
_, _, pred_class = learner.predict(
chip_path
) # Currently returns classes [not bilge, bilge, vessel]
# target_size = learner.dls.after_batch.size
target_size = (
ml_config.CHIP_SIZE_REDUCED
) # XXXJona figure out if there is another way to make this work for all learners
tiff_path = inference_to_geotiff(pred_class[1], chip_path, target_size)
return tiff_path
def load_learner_from_s3(pkl_name, update_ml=server_config.UPDATE_ML):
"""Import the latest trained model from S3
Keyword Arguments:
pkl_name {str} -- Name of pickled model to use (default: {'0_18_512_0.722.pkl'})
Returns:
fastai2_learner -- A learner with the model already loaded into memory
"""
pkl_path = Path(path_config.LOCAL_DIR) / "models" / pkl_name
if server_config.VERBOSE:
print("Loading Learner")
if update_ml:
clear(pkl_path)
if not pkl_path.exists(): # pylint: disable=no-member
src_path = "s3://skytruth-cerulean/model_artifacts/" + str(pkl_name)
download_str = f"aws s3 cp {src_path} {pkl_path}"
# print(download_str)
run(download_str, shell=True)
return load_learner(pkl_path)
sys.modules["__main__"].__dict__[
"get_lbls"
] = None # This is required to enable the pickle to load
```
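`inference_to_poly` (imported above) is the project's raster-to-vector step. This is not its actual implementation, but the general pattern it follows can be sketched with rasterio and shapely:

```python
import numpy as np
import rasterio.features
import shapely.geometry as sh

# Hedged sketch: polygonize a binary inference mask.
mask = (np.random.rand(64, 64) > 0.95).astype(np.uint8)
polys = [sh.shape(geom)
         for geom, value in rasterio.features.shapes(mask)
         if value == 1]
print(len(polys), 'polygons above threshold')
```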
#### File: ceruleanserver/utils/common.py
```python
from pathlib import Path
from datetime import datetime, timezone
import shapely.geometry as sh
from subprocess import run, PIPE
import json
import sys
sys.path.append(str(Path(__file__).parent.parent))
from configs import path_config, server_config
def clear(p):
"""Delete file if it exists
Arguments:
p {Path} -- file to be deleted
"""
    # This function can be replaced by Path.unlink(missing_ok=True) when we upgrade python to 3.8
if p.exists():
p.unlink()
def xml_get(lst, a, key1="@name", key2="#text"):
"""Extract a field from parsed XML
Arguments:
lst {list} -- a list of elements all sharing the same data type (e.g. str)
a {str} -- the name of an XML tag you want
Keyword Arguments:
key1 {str} -- the field where a is stored (default: {"@name"})
key2 {str} -- the type of data that a is (default: {"#text"})
Returns:
any -- the value of the XML tag that has the name 'a'
"""
# from a lst of dcts, find the dct that has key value pair (@name:a), then retrieve the value of (#text:?)
if lst == None:
return None # This is a hack for the case where there is no OCN product. TODO handle absent OCN higher up
for dct in lst:
if dct.get(key1) == a:
return dct.get(key2)
return None
def load_ocean_shape(geom_name=server_config.OCEAN_GEOJSON):
"""Read the ocean GeoJSON into memory once, so that it is accessible for all future functions
Returns:
[Geometry] -- A Shapely geometry produced from a GeoJSON
"""
geom_path = Path(path_config.LOCAL_DIR) / "aux_files" / geom_name
if server_config.VERBOSE:
print("Loading Ocean GeoJSON")
if not geom_path.exists(): # pylint: disable=no-member
src_path = "s3://skytruth-cerulean/aux_files/" + str(geom_name)
download_str = f"aws s3 cp {src_path} {geom_path}"
# print(download_str)
run(download_str, shell=True)
    # Open the file we just ensured exists at geom_path (the previous
    # hard-coded path ignored the geom_name argument).
    with open(geom_path) as f:
ocean_features = json.load(f)["features"]
geom = sh.GeometryCollection(
[sh.shape(feature["geometry"]).buffer(0) for feature in ocean_features]
)[0]
return geom
def load_shape(geom_name, as_multipolygon=False):
"""Read a GeoJSON into memory once, so that it is accessible for all future functions
Returns:
[Geometry] -- A Shapely geometry produced from a GeoJSON
"""
geom_path = Path(path_config.LOCAL_DIR) / "aux_files" / geom_name
if server_config.VERBOSE:
print("Loading GeoJSON:", geom_name)
print()
if not geom_path.exists(): # pylint: disable=no-member
src_path = "s3://skytruth-cerulean/aux_files/" + geom_name
download_str = f"aws s3 cp {src_path} {geom_path}"
# print(download_str)
run(download_str, shell=True)
with open(geom_path) as f:
geom = json.load(f)["features"]
if as_multipolygon:
geom = sh.GeometryCollection(
[sh.shape(feature["geometry"]).buffer(0) for feature in geom]
)[0]
return geom
def create_pg_array_string(lst):
if isinstance(lst[0], str):
out = '{"' + '","'.join(lst) + '"}'
elif isinstance(lst[0], (int, float)):
out = "{" + ",".join([str(l) for l in lst]) + "}"
    else:
        print("ERROR -- Unknown type being turned into string")
        out = None
    return out
def to_standard_datetime_str(dt):
datetime_str = dt.strftime("%Y-%m-%dT%H:%M:%SZ")
return datetime_str
```
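A usage sketch for `xml_get()` above, with a hand-built fragment shaped like xmltodict output (assuming the module is importable as `utils.common`):

```python
from utils.common import xml_get

attrs = [
    {'@name': 'passNumber', '#text': '12345'},
    {'@name': 'swathIdentifier', '#text': 'IW'},
]
print(xml_get(attrs, 'swathIdentifier'))  # -> IW
print(xml_get(attrs, 'missing'))          # -> None
```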
{
"source": "jonarbo/KUBE",
"score": 3
} |
#### File: kube/callbacks/clean.py
```python
import sys
import shutil
from datetime import datetime , timedelta
# Import the printer
from kube.printer import *
# Import some useful stuff
from kube.utils import *
# Import core engine
from kube.engine import KUBE
def cleanup(dir,since,to,delta):
    # Only delete the selected dir between since and to:
printer.info( "Cleaning","You are about to remove all data in " + Printer.bold(dir) +" since " + Printer.bold(str(since)) + " to " + Printer.bold(str(to)) )
var = raw_input("Are you sure you want to do that? (Yes/No)")
if var=="Yes":
for dd in walkDir(dir,to,delta):
clean( dd ,True)
# remove if empty ...
pd = os.path.dirname(dd)
if not os.listdir( pd ) :
shutil.rmtree( pd )
else:
printer.info( "Cleaning","cancelled")
return
# cleaning empty dirs
repeat = True
import glob
while repeat:
repeat = False
for dd in walkDir(dir):
if len( glob.glob(dd+"/*") )==0:
repeat = True
clean( dd ,True)
printer.info("Cleaning", "Done." )
def start( args ):
"""
Entry point for the 'clean' command.
Synopsis:
kube.py clean [-d {runs,results}] [--since SINCE] [--to TO] [-a APPS] [-n NETS] [-f FILESYS] [-s SYNTHS]
"""
# create the engine instance
kube = KUBE()
    cleaner = {
        'runs': kube.runs_dir,
        'results': kube.results_dir,
    }
#printer.setCurrentTheme('None')
header = "Cleaning"
opts = args.keys()
if len(opts)==0:
# By default clean cleans the runs...
# printer.warning(header, Printer.bold("You are about to remove all stored results") )
printer.warning(header, "You are about to remove all runs in: " + Printer.bold(cleaner['runs']) )
var = raw_input("Are you sure you want to do that? (Yes/No)")
if var=="Yes":
clean( cleaner['runs'] )
#clean( cleaner['results'] )
printer.info( header, "Done." )
else:
printer.info( header, "cancelled" )
# end exit
sys.exit(0)
delta=None
since=None
to=datetime.now()
if args.keys().count('since') !=0 :
since=parser.parse(args['since'])
if args.keys().count('to') !=0 :
to=parser.parse(args['to'])
if since:
delta = to-since
else:
delta = to - datetime(1973,05,02)
since = 'origin'
if args.keys().count('apps') == 0 and \
args.keys().count('nets') == 0 and \
args.keys().count('filesys') == 0 and \
args.keys().count('synths') == 0 :
if args.keys().count('d') == 0 :
dir = cleaner['runs'] + "/"
cleanup(dir,since,to,delta)
dir = cleaner['results'] + "/"
cleanup(dir,since,to,delta)
else:
dir = cleaner[args['d']] + "/"
cleanup(dir,since,to,delta)
    else:
        # All four categories follow the same pattern, so drive them from a
        # single table instead of repeating the branch four times.
        categories = [
            ('apps', 'apps'),
            ('nets', 'networks'),
            ('filesys', 'filesystems'),
            ('synths', 'synthetics'),
        ]
        for opt, subdir in categories:
            if args.keys().count(opt) != 0:
                if args[opt].lower() == "all":
                    suffix = "/" + subdir + "/"
                else:
                    suffix = "/" + subdir + "/" + args[opt] + "/"
                if args.keys().count('d') == 0:
                    cleanup(cleaner['runs'] + suffix, since, to, delta)
                    cleanup(cleaner['results'] + suffix, since, to, delta)
                else:
                    cleanup(cleaner[args['d']] + suffix, since, to, delta)
```
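The `--since`/`--to` window relies on a dateutil-style `parser` (assumed to arrive via the `from kube.utils import *` above). A short sketch of the window arithmetic that `cleanup()` receives:

```python
from dateutil import parser

since = parser.parse('2014-01-01')
to = parser.parse('2014-06-30 12:00')
delta = to - since  # timedelta handed to cleanup()
print(delta.days, 'days in the cleanup window')
```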
#### File: kube/callbacks/run.py
```python
from kube.utils import *
# Import core engine
from kube.engine import KUBE
import datetime
def start( args ):
"""
Entry point for the 'run' command.
Synopsis:
kube.py run [-a APPS] [-n NETS] [-f FILESYS] [-s SYNTHS] [--log FILE]
"""
configfile=None
if args.keys().count('configfile') != 0:
configfile=args['configfile']
del args['configfile']
# create the engine instance
kube = KUBE(configfile)
opts = args.keys()
if 'log' in opts:
Printer.setLogfile(args['log'])
printer.plain("--------------------------------------------")
printer.info("Kube run on date",str(datetime.datetime.now()))
printer.plain("--------------------------------------------")
# remove the --log from the args
del ( args['log'] )
if len( args.keys())==0:
# Run everything
kube.run()
else:
for o in opts:
if o != 'log':
what = o
items = args[what].split(',')
if items[0].lower()=='all':
kube.run(what)
else:
for i in items:
kube.run(what,i)
```
#### File: kube/callbacks/view.py
```python
from kube.utils import *
# Import core engine
from kube.engine import KUBE
def start( args ):
"""
Entry point for the 'view' command.
Synopsis:
kube.py view [-a APPS] [-n NETS] [-f FILESYS] [-s SYNTHS]
"""
configfile=None
if args.keys().count('configfile') != 0:
configfile=args['configfile']
del args['configfile']
# create the engine instance
kube = KUBE(configfile)
if len( args.keys())==0:
# view everything
kube.view()
else:
opts = args.keys()
for o in opts:
what = o
items = args[what].split(',')
if items[0].lower()=='all':
kube.view(what)
else:
for i in items:
kube.view(what,i)
```
#### File: lib/kube/printer.py
```python
import sys, os
class Logger(object):
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Logger, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __init__(self, filename="kube.log"):
self.terminal = sys.stdout
try:
self.log = open(filename, "a")
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
sys.exit(e.errno)
def write(self, message):
self.terminal.write(message)
self.log.write(message)
@staticmethod
def getInstance():
return Logger._instance
######################################################
#
# Printer Class
#
######################################################
class Printer(object):
""" Singleton class to print out with fancy colors """
_End="\033[0;0m" # End string
_theme = None
_themeName =''
_instance = None
_log = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Printer, cls).__new__(cls, *args, **kwargs)
cls._themeName = 'WhiteOnBlack'
cls._theme = Printer._Themes[cls._themeName ]
cls.Level = 0
cls._log = None
return cls._instance
# Decorator to make this class loggable to file
def __loggable(f):
def inner(*args,**kwargs):
if Printer._log:
if not Logger.getInstance():
sys.stdout = Logger(Printer._log)
if f.__name__ == 'bold':
return str(args[0])
elif f.__name__ != None:
if kwargs.keys().count('color')!=0:
kwargs['color'] = None
f(*args,**kwargs)
else:
logger = Logger.getInstance()
if logger:
del logger
if f.__name__ == 'bold':
return f(str(args[0]))
else:
f(*args,**kwargs)
return inner
#######################################
# class members
Level = 0
_Themes={\
'None':{
'Error':"\033[1m", \
'Bold':"\033[1m", \
'Info':"\033[1m", \
'Warning':"\033[1m", \
},\
'WhiteOnBlack' : {\
'Error':"\033[0;41m", # red\
'Bold':"\033[1m", \
#'Bold':"\033[1;42m", # green\
'Info':"\033[1;94m", # blue\
'Warning':"\033[0;43m", # orange\
}
}
@staticmethod
@__loggable
def bold(str):
return Printer._theme['Bold'] + str + Printer._End
@staticmethod
def setLogfile(logfile=None):
Printer._log = logfile
#######################################
def getCurrentTheme(self):
return Printer._themeName
def setCurrentTheme(self,name):
if not name in Printer._Themes.keys():
print "\'" + name + "\' not a theme"
else:
Printer._themeName = name
Printer._theme = Printer._Themes[Printer._themeName]
#######################################
# Private methods
@__loggable
def __printout(self,header,message=None,wait=None,color=None ):
for i in range (0 ,Printer.Level):
print "\t",
if message :
if color:
if wait:
print color + header + ":" + Printer._End + " " + message ,
else:
print color + header + ":" + Printer._End + " " + message
else:
if wait:
print header + ": " + message ,
else:
print header + ": " + message
else:
if color:
if wait:
print color + header + Printer._End ,
else:
print color + header + Printer._End
else:
if wait:
print header,
else:
print header
#######################################
#
# Methods
#
def plain(self, header, message=None,wait=None):
self.__printout(header,message,wait)
def warning( self, header, message=None,wait=None):
tcolor = Printer._theme['Warning']
self.__printout(header,message,wait,color=tcolor)
def info( self,header, message=None,wait=None):
tcolor = Printer._theme['Info']
self.__printout(header,message,wait,color=tcolor)
def error( self, header,message=None,wait=None):
tcolor = Printer._theme['Error']
self.__printout(header,message,wait,color=tcolor)
printer = Printer()
```
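A usage sketch for the `Printer` singleton above, exercising the logging mirror and the themed helpers:

```python
from kube.printer import printer, Printer

Printer.setLogfile('kube.log')  # mirror all output into a file
printer.info('Run', 'starting ' + Printer.bold('apps'))
printer.warning('Run', 'low disk space')
printer.error('Run', 'benchmark failed')
```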
{
"source": "jonarce/refreshdata",
"score": 3
} |
#### File: src/upload/refreshdata.py
```python
__author__ = "<NAME>"
__copyright__ = "Copyright 2020"
__credits__ = ["<NAME>"]
__license__ = "Apache"
__version__ = "1.0.2"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"
# DEPENDENCIES:
# pip3 install xmltodict
#
import sys
import yaml
import base64
import datetime
#from posix import getcwd
import xmltodict
#import xml.etree.ElementTree as xml
import csv
from string import Template
# Database: Postgres
import psycopg2
# Function to substitute placeholders for actual values coming from the source
def replace_vals(str_template, vals):
# escape any required SQL character
for key, value in vals.items():
# set empty values to NONE
if empty(value):
vals[key] = 'NULL'
# escape required SQL characters in strings
elif isinstance(value, str):
vals[key] = value.replace('\'', '\"')
t=Template(str_template)
return t.substitute(vals)
# Check if variable is empty
def empty(value):
if isinstance(value, datetime.datetime):
return False
else:
if not value:
return True
else:
return False
if __name__ == '__main__':
now = datetime.datetime.now()
lines = 0
print("Starting job..." + str(now))
# Print arguments one by one
job_file = str(sys.argv[1])
#check if job file exists
    try:
        f = open(job_file)
    except FileNotFoundError:
        print("ERROR: YAML file not accessible")
        sys.exit()
    else:
        # Only close the handle when open() succeeded; the previous finally
        # block raised NameError whenever the file was missing.
        f.close()
print ('Job file:', job_file)
# read all YAML parameters in a Dict.
with open(job_file) as f:
job_doc = yaml.load(f, Loader=yaml.FullLoader)
print('JOB NAME: ',job_doc['job']['common']['job-name'])
print('JOB TYPE: ',job_doc['job']['common']['mode'])
print(job_doc)
# Open source
if (job_doc['job']['source']['mode'] == 'file'):
# open database connection
check_exists_sql = job_doc['job']['target']['check-exists-sql']
conn = psycopg2.connect(
host=job_doc['job']['target']['server'],
database=job_doc['job']['target']['database'],
user=str(base64.urlsafe_b64decode(job_doc['job']['target']['user']), "utf-8"),
password=str(base64.urlsafe_b64decode(job_doc['job']['target']['password']), "utf-8"))
cur = conn.cursor()
# file reader settings
file_name = job_doc['job']['source']['file-name']
file_encoding = job_doc['job']['source']['encoding']
file_dialect = job_doc['job']['source']['dialect']
file_delimiter = job_doc['job']['source']['delimiter']
file_quotechar = job_doc['job']['source']['quote-char']
# open as a dictionary
source_reader = csv.DictReader(open(file_name, "rt", encoding = file_encoding),
dialect = file_dialect,
# fieldnames=job_doc['job']['source']['headers'],
#delimiter=',', quotechar='"')
delimiter = file_delimiter,
quotechar = file_quotechar
)
check_exists_sql = job_doc['job']['target']['check-exists-sql']
insert_sql = job_doc['job']['target']['insert-sql']
update_sql = job_doc['job']['target']['update-sql']
timestamp = datetime.datetime.now()
timestamp_field = job_doc['job']['target']['timestamp']
# delete old records / not touched by this batch
before_import_sql = job_doc['job']['target']['before-import-sql']
if not empty(before_import_sql):
print(' BEFORE IMPORT...', end="")
job_params = {}
# add timestamp to row of data
job_params[timestamp_field] = timestamp
sql_query = replace_vals(before_import_sql, job_params)
print(sql_query)
cur.execute(sql_query)
conn.commit()
for data_row in source_reader:
# add timestamp to row of data
data_row[timestamp_field] = timestamp
print(lines+1, ":", end="")
# check if record exists
check_sql_query = replace_vals(check_exists_sql, data_row)
cur.execute(check_sql_query)
check_records = cur.fetchall()
# if record exists then UPDATE
if (cur.rowcount):
print(' UPDATE...', end="")
sql_query = replace_vals(update_sql, data_row)
# if new record then INSERT
else:
print(' INSERT...', end="")
sql_query = replace_vals(insert_sql, data_row)
# print('SQL Query: ',sql_query)
cur.execute(sql_query)
conn.commit()
lines += 1
print([(k, data_row[k]) for k in data_row])
# input("Press Enter to continue...")
# delete old records / not touched by this batch
delete_old_sql = job_doc['job']['target']['delete-old-sql']
if not empty(delete_old_sql):
print(' DELETE...', end="")
job_params = {}
# add timestamp to row of data
job_params[timestamp_field] = timestamp
sql_query = replace_vals(delete_old_sql, job_params)
print(sql_query)
cur.execute(sql_query)
conn.commit()
# execute the after-import-sql
after_import_sql = job_doc['job']['target']['after-import-sql']
if not empty(after_import_sql):
print(' AFTER IMPORT...', end="")
job_params = {}
# add timestamp to row of data
job_params[timestamp_field] = timestamp
sql_query = replace_vals(after_import_sql, job_params)
print(sql_query)
cur.execute(sql_query)
conn.commit()
# print end of job
now = datetime.datetime.now()
print(' ending job...',str(now),' lines:',str(lines))
# close database
cur.close()
conn.close()
f.close()
# EOF
```
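A quick sketch of what `replace_vals()` above produces (values are made up); note that `empty()` treats any falsy non-datetime value, including 0, as NULL:

```python
from refreshdata import replace_vals

row = {'id': 7, 'name': "O'Brien", 'ts': None}
sql = "INSERT INTO person (id, name, ts) VALUES ($id, '$name', $ts)"
print(replace_vals(sql, row))
# -> INSERT INTO person (id, name, ts) VALUES (7, 'O"Brien', NULL)
```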
{
"source": "jonardonh/KickassTorrentsAPI",
"score": 2
} |
#### File: jonardonh/KickassTorrentsAPI/app.py
```python
from KickAssAPI import KickAssAPI
from flask import Flask, request, jsonify
API = KickAssAPI()
app = Flask(__name__)
@app.route('/')
def index():
return 'KickAssAPI up and running!'
@app.route('/search')
def search():
torrent = request.args.get('torrent')
if torrent:
resp = API.search(torrent)
return jsonify(resp)
else:
return jsonify({'error': 'No query provided'})
@app.route('/magnet')
def magnet():
page_url = request.args.get('page_url')
if page_url:
magnet_link = API.magnet(page_url)
return jsonify({'magnet': magnet_link})
else:
return jsonify({'error': 'No magnet provided'})
if __name__ == '__main__':
app.run(debug=True)
```
#### File: jonardonh/KickassTorrentsAPI/KickAssAPI.py
```python
import requests
from bs4 import BeautifulSoup
class KickAssAPI():
def __init__(self):
self.headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36'}
def search(self,query) -> dict:
""" Returns a list of dictionaries with the results of the search """
url = "https://katcr.to/usearch/" + query + "/"
results = requests.get(url, headers=self.headers)
if results.status_code == 200:
soup = BeautifulSoup(results.text, "html.parser")
resp = soup.find_all("a", {"class": "cellMainLink"})
search_results = {}
n = 1
for r in resp:
title = r.text.replace("\n","").strip()
page_url = r.get("href").strip()
search_results[n] = {"title": title, "page_url": page_url}
n+=1
results.close()
elif results.status_code == 403:
print("\nThe URL (\"https://katcr.to\") responded with code 403.\nThis means that the server understood the request but refuses to authorize it.")
results.close()
exit()
elif results.status_code == 404:
print("\nThe URL (\"https://katcr.to\") responded with code 404.\nThis means that the server cannot find the page you requested. ")
results.close()
exit()
        else:
            results.close()
            print("\nThe URL (\"https://katcr.to\") responded with code " + str(results.status_code) + ".\nThis means that the server is not responding to the request.")
            # Bail out like the 403/404 branches; otherwise search_results is
            # unbound and the return below raises NameError.
            exit()
        return search_results
def magnet(self,page_url) -> str:
""" Returns the magnet link of the selected torrent """
magnet_page = requests.get("https://katcr.to"+page_url, headers=self.headers)
magnet_page_bs = BeautifulSoup(magnet_page.text, "html.parser")
magnet_link = magnet_page_bs.find("a", {"class": "kaGiantButton"}).get("href")
magnet_page.close()
return magnet_link
```
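A usage sketch for the wrapper above:

```python
from KickAssAPI import KickAssAPI

api = KickAssAPI()
results = api.search('ubuntu')   # {1: {'title': ..., 'page_url': ...}, ...}
first = results[1]
print(first['title'])
print(api.magnet(first['page_url']))  # magnet:?xt=urn:btih:...
```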
{
"source": "jon-armstrong/pyrox",
"score": 3
} |
#### File: pyrox/pyrox/log.py
```python
import logging
_LOG_LEVEL_NOTSET = 'NOTSET'
def get_logger(logger_name):
return _LOGGING_MANAGER.get_logger(logger_name)
def get_log_manager():
return _LOGGING_MANAGER
class LoggingManager(object):
def __init__(self):
self._root_logger = logging.getLogger()
self._handlers = list()
def _add_handler(self, handler):
self._handlers.append(handler)
self._root_logger.addHandler(handler)
def _clean_handlers(self):
"""
Removes all current handlers.
TODO:Review - Not sure if this may cause problems.
"""
[self._root_logger.removeHandler(hdlr) for hdlr in self._handlers]
del self._handlers[:]
def configure(self, cfg):
self._clean_handlers()
# Configuration handling
self._root_logger.setLevel(cfg.logging.verbosity)
if cfg.logging.logfile is not None:
self._add_handler(logging.FileHandler(cfg.logging.logfile))
if cfg.logging.console is True:
self._add_handler(logging.StreamHandler())
def get_logger(self, logger_name):
logger = logging.getLogger(logger_name)
logger.setLevel(_LOG_LEVEL_NOTSET)
return logger
globals()['_LOGGING_MANAGER'] = LoggingManager()
```
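A usage sketch for the manager above; the `cfg` shape (`cfg.logging.verbosity` / `logfile` / `console`) follows what `configure()` reads, built here with a Python 3 `SimpleNamespace`:

```python
from types import SimpleNamespace
from pyrox.log import get_log_manager, get_logger

cfg = SimpleNamespace(logging=SimpleNamespace(
    verbosity='DEBUG', logfile=None, console=True))

get_log_manager().configure(cfg)
log = get_logger(__name__)
log.info('logging configured')
```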
#### File: pyrox/server/proxyng.py
```python
import socket
import tornado
import tornado.ioloop
import tornado.process
from .routing import RoundRobinRouter, PROTOCOL_HTTP, PROTOCOL_HTTPS
from pyrox.tstream.iostream import (SSLSocketIOHandler, SocketIOHandler,
StreamClosedError)
from pyrox.tstream.tcpserver import TCPServer
from pyrox.log import get_logger
from pyrox.about import VERSION
from pyrox.http import (HttpRequest, HttpResponse, RequestParser,
ResponseParser, ParserDelegate)
import traceback
_LOG = get_logger(__name__)
"""
String representing a 0 length HTTP chunked encoding chunk.
"""
_CHUNK_CLOSE = b'0\r\n\r\n'
"""
Default return object on error. This should be configurable.
"""
_BAD_GATEWAY_RESP = HttpResponse()
_BAD_GATEWAY_RESP.version = b'1.1'
_BAD_GATEWAY_RESP.status = '502 Bad Gateway'
_BAD_GATEWAY_RESP.header('Server').values.append('pyrox/{}'.format(VERSION))
_BAD_GATEWAY_RESP.header('Content-Length').values.append('0')
"""
Default return object on no route or upstream not responding. This should
be configurable.
"""
_UPSTREAM_UNAVAILABLE = HttpResponse()
_UPSTREAM_UNAVAILABLE.version = b'1.1'
_UPSTREAM_UNAVAILABLE.status = '503 Service Unavailable'
_UPSTREAM_UNAVAILABLE.header('Server').values.append('pyrox/{}'.format(VERSION))
_UPSTREAM_UNAVAILABLE.header('Content-Length').values.append('0')
def _write_to_stream(stream, data, is_chunked, callback=None):
if is_chunked:
# Format and write this chunk
chunk = bytearray()
chunk.extend(hex(len(data))[2:])
chunk.extend('\r\n')
chunk.extend(data)
chunk.extend('\r\n')
stream.write(chunk, callback)
else:
stream.write(data, callback)
class AccumulationStream(object):
def __init__(self):
self.bytes = bytearray()
def write(self, data):
self.bytes.extend(data)
def size(self):
return len(self.bytes)
class ProxyHandler(ParserDelegate):
"""
Common class for the stream handlers. This parent class manages the
following:
- Handling of header field names.
- Tracking rejection of message sessions.
"""
def __init__(self, filter_pl, http_msg):
self._filter_pl = filter_pl
self._http_msg = http_msg
self._chunked = False
self._last_header_field = None
self._intercepted = False
def on_http_version(self, major, minor):
self._http_msg.version = '{}.{}'.format(major, minor)
def on_header_field(self, field):
self._last_header_field = field
def on_header_value(self, value):
header = self._http_msg.header(self._last_header_field)
header.values.append(value)
self._last_header_field = None
class DownstreamHandler(ProxyHandler):
"""
This proxy handler manages data coming from downstream of the proxy.
This data comes from the client initiating the request against the
proxy.
"""
def __init__(self, downstream, filter_pl, connect_upstream):
super(DownstreamHandler, self).__init__(filter_pl, HttpRequest())
self._downstream = downstream
self._upstream = None
self._preread_body = None
self._connect_upstream = connect_upstream
def _store_chunk(self, body_fragment):
if not self._preread_body:
self._preread_body = bytearray()
self._preread_body.extend(body_fragment)
def on_req_method(self, method):
self._http_msg.method = method
def on_req_path(self, url):
self._http_msg.url = url
def on_headers_complete(self):
# Execute against the pipeline
action = self._filter_pl.on_request_head(self._http_msg)
# If we are intercepting the request body do some negotiation
if self._filter_pl.intercepts_req_body():
self._chunked = True
            # If there's a content length, negotiate the transfer encoding
if self._http_msg.get_header('content-length'):
self._http_msg.remove_header('content-length')
self._http_msg.remove_header('transfer-encoding')
self._http_msg.header('transfer-encoding').values.append('chunked')
# If we're rejecting then we're not going to connect to upstream
if action.intercepts_request():
self._intercepted = True
self._response_tuple = action.payload
else:
# Hold up on the client side until we're done negotiating
# connections.
self._downstream.handle.disable_reading()
# We're routing to upstream; we need to know where to go
if action.is_routing():
self._connect_upstream(self._http_msg, action.payload)
else:
self._connect_upstream(self._http_msg)
def on_body(self, bytes, length, is_chunked):
self._chunked = is_chunked
if self._downstream.reading():
# Hold up on the client side until we're done with this chunk
self._downstream.handle.disable_reading()
# Rejections simply discard the body
if not self._intercepted:
accumulator = AccumulationStream()
data = bytes
self._filter_pl.on_request_body(data, accumulator)
if accumulator.size() > 0:
data = accumulator.bytes
if self._upstream:
# When we write to the stream set the callback to resume
# reading from downstream.
_write_to_stream(self._upstream, data, is_chunked,
self._downstream.handle.resume_reading)
else:
# If we're not connected upstream, store the fragment
# for later
self._store_chunk(data)
def on_upstream_connect(self, upstream):
self._upstream = upstream
if self._preread_body and len(self._preread_body) > 0:
_write_to_stream(self._upstream, self._preread_body,
self._chunked,
self._downstream.handle.resume_reading)
self._preread_body = None
def on_message_complete(self, is_chunked, keep_alive):
callback = self._downstream.close
# Enable reading when we're ready later
self._downstream.handle.disable_reading()
if keep_alive:
self._http_msg = HttpRequest()
if self._intercepted:
self._downstream.write(self._response_tuple[0].to_bytes(), callback)
elif is_chunked or self._chunked:
# Finish the last chunk.
self._upstream.write(_CHUNK_CLOSE)
class UpstreamHandler(ProxyHandler):
"""
This proxy handler manages data coming from upstream of the proxy. This
data usually comes from the origin service or it may come from another
proxy.
"""
def __init__(self, downstream, upstream, filter_pl, request):
super(UpstreamHandler, self).__init__(filter_pl, HttpResponse())
self._downstream = downstream
self._upstream = upstream
self._request = request
def on_status(self, status_code):
self._http_msg.status = str(status_code)
def on_headers_complete(self):
action = self._filter_pl.on_response_head(self._http_msg, self._request)
# If we are intercepting the response body do some negotiation
if self._filter_pl.intercepts_resp_body():
# If there's a content length, negotiate the transfer encoding
if self._http_msg.get_header('content-length'):
self._chunked = True
self._http_msg.remove_header('content-length')
self._http_msg.remove_header('transfer-encoding')
self._http_msg.header('transfer-encoding').values.append('chunked')
if action.is_rejecting():
self._intercepted = True
self._response_tuple = action.payload
else:
self._downstream.write(self._http_msg.to_bytes())
def on_body(self, bytes, length, is_chunked):
# Rejections simply discard the body
if not self._intercepted:
accumulator = AccumulationStream()
data = bytes
self._filter_pl.on_response_body(data, accumulator, self._request)
if accumulator.size() > 0:
data = accumulator.bytes
# Hold up on the upstream side until we're done sending this chunk
self._upstream.handle.disable_reading()
# When we write to the stream set the callback to resume
# reading from upstream.
_write_to_stream(
self._downstream,
data,
is_chunked or self._chunked,
self._upstream.handle.resume_reading)
def on_message_complete(self, is_chunked, keep_alive):
callback = self._upstream.close
self._upstream.handle.disable_reading()
if keep_alive:
self._http_msg = HttpResponse()
callback = self._downstream.handle.resume_reading
if self._intercepted:
# Serialize our message to them
self._downstream.write(self._http_msg.to_bytes(), callback)
elif is_chunked or self._chunked:
# Finish the last chunk.
self._downstream.write(_CHUNK_CLOSE, callback)
else:
callback()
class ConnectionTracker(object):
def __init__(self, on_stream_live, on_target_closed, on_target_error):
self._streams = dict()
self._target_in_use = None
self._on_stream_live = on_stream_live
self._on_target_closed = on_target_closed
self._on_target_error = on_target_error
def destroy(self):
for stream in self._streams.values():
if not stream.closed():
stream.close()
def connect(self, target):
self._target_in_use = target
live_stream = self._streams.get(target)
if live_stream:
# Make the cb ourselves since the socket's already connected
self._on_stream_live(live_stream)
else:
self._new_connection(target)
def _new_connection(self, target):
host, port, protocol = target
# Set up our upstream socket
us_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
# Create and bind the IO Handler based on selected protocol
if protocol == PROTOCOL_HTTP:
live_stream = SocketIOHandler(us_sock)
elif protocol == PROTOCOL_HTTPS:
live_stream = SSLSocketIOHandler(us_sock)
else:
raise Exception('Unknown protocol: {}.'.format(protocol))
# Store the stream reference for later use
self._streams[target] = live_stream
# Build and set the on_close callback
def on_close():
# Disable error cb on close
live_stream.on_error(None)
del self._streams[target]
if self._target_in_use == target:
self.destroy()
self._on_target_closed()
live_stream.on_close(on_close)
# Build and set the on_error callback
def on_error(error):
            # Disable close cb on error
live_stream.on_close(None)
if self._target_in_use == target:
del self._streams[target]
if self._target_in_use == target:
self.destroy()
self._on_target_error(error)
live_stream.on_error(on_error)
# Build and set the on_connect callback and then connect
def on_connect():
self._on_stream_live(live_stream)
live_stream.connect((host, port), on_connect)
class ProxyConnection(object):
"""
A proxy connection manages the lifecycle of the sockets opened during a
proxied client request against Pyrox.
"""
def __init__(self, us_filter_pl, ds_filter_pl, downstream, router):
self._ds_filter_pl = ds_filter_pl
self._us_filter_pl = us_filter_pl
self._router = router
self._upstream_parser = None
self._upstream_tracker = ConnectionTracker(
self._on_upstream_live,
self._on_upstream_close,
self._on_upstream_error)
# Setup all of the wiring for downstream
self._downstream = downstream
self._downstream_handler = DownstreamHandler(
self._downstream,
self._ds_filter_pl,
self._connect_upstream)
self._downstream_parser = RequestParser(self._downstream_handler)
self._downstream.on_close(self._on_downstream_close)
self._downstream.read(self._on_downstream_read)
def _connect_upstream(self, request, route=None):
if route is not None:
# This does some type checking for routes passed up via filter
self._router.set_next(route)
upstream_target = self._router.get_next()
if upstream_target is None:
self._downstream.write(_UPSTREAM_UNAVAILABLE.to_bytes(),
self._downstream.handle.resume_reading)
return
# Hold downstream reads
self._hold_downstream = True
# Update the request to proxy upstream and store it
request.replace_header('host').values.append(
'{}:{}'.format(upstream_target[0], upstream_target[1]))
self._request = request
try:
self._upstream_tracker.connect(upstream_target)
except Exception as ex:
_LOG.exception(ex)
def _on_upstream_live(self, upstream):
self._upstream_handler = UpstreamHandler(
self._downstream,
upstream,
self._us_filter_pl,
self._request)
if self._upstream_parser:
self._upstream_parser.destroy()
self._upstream_parser = ResponseParser(self._upstream_handler)
# Set the read callback
upstream.read(self._on_upstream_read)
# Send the proxied request object
upstream.write(self._request.to_bytes())
# Drop the ref to the proxied request head
self._request = None
# Set up our downstream handler
self._downstream_handler.on_upstream_connect(upstream)
def _on_downstream_close(self):
self._upstream_tracker.destroy()
self._downstream_parser.destroy()
self._downstream_parser = None
def _on_downstream_error(self, error):
_LOG.error('Downstream error: {}'.format(error))
if not self._downstream.closed():
self._downstream.close()
def _on_upstream_error(self, error):
if not self._downstream.closed():
self._downstream.write(_BAD_GATEWAY_RESP.to_bytes())
def _on_upstream_close(self):
if not self._downstream.closed():
self._downstream.close()
if self._upstream_parser is not None:
self._upstream_parser.destroy()
self._upstream_parser = None
def _on_downstream_read(self, data):
try:
self._downstream_parser.execute(data)
except StreamClosedError:
pass
except Exception as ex:
_LOG.exception(ex)
def _on_upstream_read(self, data):
try:
self._upstream_parser.execute(data)
except StreamClosedError:
pass
except Exception as ex:
_LOG.exception(ex)
class TornadoHttpProxy(TCPServer):
"""
Subclass of the Tornado TCPServer that lets us set up the Pyrox proxy
orchestrations.
    :param pipeline_factories: A tuple with the upstream filter pipeline
                               factory as the first element and the downstream
                               filter pipeline factory as the second element.
"""
def __init__(self, pipeline_factories, default_us_targets=None,
ssl_options=None):
super(TornadoHttpProxy, self).__init__(ssl_options=ssl_options)
self._router = RoundRobinRouter(default_us_targets)
self.us_pipeline_factory = pipeline_factories[0]
self.ds_pipeline_factory = pipeline_factories[1]
def handle_stream(self, downstream, address):
connection_handler = ProxyConnection(
self.us_pipeline_factory(),
self.ds_pipeline_factory(),
downstream,
self._router)
```
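`_write_to_stream()` above hand-builds HTTP/1.1 chunked framing; a Python 3 sketch of the same byte layout:

```python
def frame_chunk(data):
    # <hex length>\r\n<data>\r\n
    chunk = bytearray()
    chunk.extend(hex(len(data))[2:].encode())
    chunk.extend(b'\r\n')
    chunk.extend(data)
    chunk.extend(b'\r\n')
    return bytes(chunk)

print(frame_chunk(b'hello'))  # b'5\r\nhello\r\n'
print(b'0\r\n\r\n')           # zero-length terminator (_CHUNK_CLOSE)
```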
#### File: jon-armstrong/pyrox/setup.py
```python
import os
import sys
import pyrox.about
from setuptools import setup, find_packages
from distutils.extension import Extension
try:
from Cython.Build import cythonize
has_cython = True
except ImportError:
has_cython = False
def read(relative):
contents = open(relative, 'r').read()
return [l for l in contents.split('\n') if l != '']
def compile_pyx():
ext_modules = list()
cparser = cythonize('pyrox/http/parser.pyx')[0]
cparser.sources.insert(0, 'include/http_el.c')
ext_modules.append(cparser)
ext_modules.extend(cythonize('pyrox/http/model_util.pyx'))
return ext_modules
# compiler flags
CFLAGS = ['-I', './include']
DEBUG = os.getenv('DEBUG')
if DEBUG and DEBUG.lower() == 'true':
CFLAGS.extend(['-D', 'DEBUG_OUTPUT'])
os.environ['CFLAGS'] = ' '.join(CFLAGS)
setup(
name='pyrox',
version=pyrox.about.VERSION,
description='The high-speed HTTP middleware proxy for Python',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/zinic/pyrox',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Cython',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet',
'Topic :: Utilities'
],
scripts=['scripts/pyrox'],
tests_require=read('tools/tests_require.txt'),
install_requires=read('tools/install_requires.txt'),
test_suite='nose.collector',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['*.tests']),
ext_modules=compile_pyx())
```
#### File: tests/filtering/pipeline_test.py
```python
import mock
import unittest
import pyrox.filtering as filtering
class TestFilterWithAllDecorators(filtering.HttpFilter):
def __init__(self):
self.on_req_head_called = False
self.on_req_body_called = False
self.on_resp_head_called = False
self.on_resp_body_called = False
def were_expected_calls_made(self):
return self.on_req_head_called and \
self.on_req_body_called and \
self.on_resp_head_called and \
self.on_resp_body_called
@filtering.handles_request_head
def on_req_head(self, request_head):
self.on_req_head_called = True
@filtering.handles_request_body
def on_req_body(self, body_part, output):
self.on_req_body_called = True
@filtering.handles_response_head
def on_resp_head(self, response_head):
self.on_resp_head_called = True
@filtering.handles_response_body
def on_resp_body(self, body_part, output):
self.on_resp_body_called = True
class WhenBuildingPipelines(unittest.TestCase):
def test_adding_filters(self):
pipeline = filtering.HttpFilterPipeline()
http_filter = TestFilterWithAllDecorators()
http_filter.on_req_head(mock.MagicMock())
pipeline.add_filter(http_filter)
pipeline.on_request_head(mock.MagicMock())
pipeline.on_request_body(mock.MagicMock(), mock.MagicMock())
pipeline.on_response_head(mock.MagicMock())
pipeline.on_response_body(mock.MagicMock(), mock.MagicMock())
self.assertTrue(http_filter.were_expected_calls_made())
class TestHttpFilterPipeline(unittest.TestCase):
def test_response_methods_pass_optional_request(self):
resp_head = mock.MagicMock()
resp_body = mock.MagicMock()
req_head = mock.MagicMock()
msg_part = mock.MagicMock()
out = mock.MagicMock()
assertEqual = self.assertEqual
class ResponseFilterUsesRequest():
def __init__(self):
self.on_resp_head_called = False
self.on_resp_body_called = False
def were_expected_calls_made(self):
return self.on_resp_head_called and \
self.on_resp_body_called
@filtering.handles_response_head
def on_response_head(self, response_head, request_head):
assertEqual(resp_head, response_head)
assertEqual(req_head, request_head)
self.on_resp_head_called = True
@filtering.handles_response_body
def on_response_body(self, message_part, output, request_head):
assertEqual(msg_part, message_part)
assertEqual(out, output)
assertEqual(req_head, request_head)
self.on_resp_body_called = True
pipeline = filtering.HttpFilterPipeline()
resp_filter = ResponseFilterUsesRequest()
pipeline.add_filter(resp_filter)
pipeline.on_response_head(resp_head, req_head)
pipeline.on_response_body(msg_part, out, req_head)
self.assertTrue(resp_filter.were_expected_calls_made())
if __name__ == '__main__':
unittest.main()
```
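These pipeline tests exercise the decorator hooks without a running proxy. For orientation, a hedged sketch of a minimal filter using the same public names the tests rely on (the print side effect is illustrative only):
```python
import pyrox.filtering as filtering
class HeaderLogger(filtering.HttpFilter):
    """Observes request heads without altering proxy behaviour."""
    @filtering.handles_request_head
    def on_request_head(self, request_head):
        print('saw request head:', request_head)
pipeline = filtering.HttpFilterPipeline()
pipeline.add_filter(HeaderLogger())
# The proxy core then drives pipeline.on_request_head(...) per request,
# exactly as the tests above simulate with MagicMock objects.
```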
#### File: pyrox/tests/iohandling_test.py
```python
import socket
import unittest
import tornado
import mock
from pyrox.iohandling import *
class TornadoTestCase(unittest.TestCase):
def setUp(self):
self.io_loop = mock.MagicMock()
# Mock the FD interests
self.io_loop.ERROR = ERROR
self.io_loop.READ = READ
self.io_loop.WRITE = WRITE
def tearDown(self):
pass
class FileDescriptorChannelsTests(TornadoTestCase):
def setUp(self):
super(FileDescriptorChannelsTests, self).setUp()
self.fd_channel = FileDescriptorChannel(0, self.io_loop)
self.fd_channel.closed = lambda: False
def test_setting_handlers(self):
event_handler = mock.MagicMock()
self.fd_channel.set_handler(event_handler)
self.assertEqual(self.io_loop.ERROR,
self.fd_channel._event_interests)
self.assertTrue(self.fd_channel._has_handler)
self.io_loop.add_handler.assert_called_once_with(0,
event_handler, self.io_loop.ERROR)
def test_setting_handlers_on_closed_channels(self):
self.fd_channel.closed = lambda: True
event_handler = mock.MagicMock()
with self.assertRaises(ChannelError):
self.fd_channel.set_handler(event_handler)
def test_setting_handlers_twice(self):
event_handler = mock.MagicMock()
self.fd_channel.set_handler(event_handler)
with self.assertRaises(ChannelError):
self.fd_channel.set_handler(event_handler)
def test_removing_handlers(self):
event_handler = mock.MagicMock()
self.fd_channel.set_handler(event_handler)
self.fd_channel.remove_handler()
self.assertEqual(self.io_loop.ERROR,
self.fd_channel._event_interests)
self.assertFalse(self.fd_channel._has_handler)
self.io_loop.remove_handler.assert_called_once_with(0)
def test_read_interest_controls(self):
event_handler = mock.MagicMock()
error_and_read_interests = self.io_loop.ERROR | self.io_loop.READ
self.fd_channel.set_handler(event_handler)
self.fd_channel.enable_reads()
self.assertEqual(error_and_read_interests,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0,
error_and_read_interests)
self.assertTrue(self.fd_channel.reads_enabled())
self.fd_channel.disable_reads()
self.assertEqual(self.io_loop.ERROR,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0,
self.io_loop.ERROR)
self.assertFalse(self.fd_channel.reads_enabled())
def test_write_interest_controls(self):
event_handler = mock.MagicMock()
error_and_write_interests = self.io_loop.ERROR | self.io_loop.WRITE
self.fd_channel.set_handler(event_handler)
self.fd_channel.enable_writes()
self.assertEqual(error_and_write_interests,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0,
error_and_write_interests)
self.assertTrue(self.fd_channel.writes_enabled())
self.fd_channel.disable_writes()
self.assertEqual(self.io_loop.ERROR,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0,
self.io_loop.ERROR)
self.assertFalse(self.fd_channel.writes_enabled())
def test_error_interest_controls(self):
event_handler = mock.MagicMock()
self.fd_channel.set_handler(event_handler)
self.fd_channel.disable_errors()
self.assertFalse(self.fd_channel.errors_enabled())
self.assertEqual(0,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0, 0)
self.fd_channel.enable_errors()
self.assertEqual(self.io_loop.ERROR,
self.fd_channel._event_interests)
self.io_loop.update_handler.assert_called_with(0,
self.io_loop.ERROR)
self.assertTrue(self.fd_channel.errors_enabled())
class SocketChannelsTests(TornadoTestCase):
def setUp(self):
super(SocketChannelsTests, self).setUp()
self.socket = mock.MagicMock()
self.socket.fileno = lambda: 0
self.socket_channel = SocketChannel(self.socket, self.io_loop)
def test_init(self):
self.socket.setsockopt.assert_called_with(
socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.setblocking.assert_called_with(0)
    def test_receiving(self):
self.socket_channel.recv(4)
self.socket.recv.assert_called_with(4)
    def test_receiving_into(self):
buffer = mock.MagicMock()
self.socket_channel.recv_into(buffer, 4)
self.socket.recv_into.assert_called_with(buffer, 4)
def test_sending(self):
self.socket_channel.send(b'test')
self.assertTrue(self.socket_channel.has_queued_send())
def test_sending_twice(self):
self.socket_channel.send(b'test')
with self.assertRaises(ChannelError):
self.socket_channel.send(b'test')
    def test_flushing(self):
self.socket.send.return_value = 2
self.socket_channel.send(b'test')
self.assertFalse(self.socket_channel.flush())
self.assertTrue(self.socket_channel.flush())
def test_closing(self):
event_handler = mock.MagicMock()
self.socket_channel.set_handler(event_handler)
self.assertFalse(self.socket_channel.closed())
self.socket_channel.close()
self.assertTrue(self.socket_channel.closed())
self.io_loop.remove_handler.assert_called()
def test_getting_socket_errors(self):
self.socket_channel.error()
self.socket.getsockopt.assert_called_with(
socket.SOL_SOCKET, socket.SO_ERROR)
class WhenTesting(TornadoTestCase):
def test_magic(self):
socket = mock.MagicMock()
socket.fileno.return_value = 25
channel = SocketChannel(socket, io_loop=self.io_loop)
event_router = ChannelEventRouter(io_loop=self.io_loop)
event_router.register(channel)
self.io_loop.add_handler.assert_called_once_with(socket.fileno(),
mock.ANY, self.io_loop.ERROR)
if __name__ == '__main__':
unittest.main()
```
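The interest-tracking these channel tests assert is plain bitwise arithmetic over the loop's event constants; a standalone sketch (the constant values below are hypothetical stand-ins, not tornado's actual ones):
```python
ERROR, READ, WRITE = 0x8, 0x1, 0x4  # hypothetical stand-ins for IOLoop constants
interests = ERROR                   # set_handler registers with errors only
interests |= READ                   # enable_reads adds the READ bit
assert interests == ERROR | READ
interests &= ~READ                  # disable_reads clears it again
assert interests == ERROR
```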
#### File: tests/stock_filters/keystone_meniscus_ftest.py
```python
from ConfigParser import ConfigParser
import unittest
import pyrox.http as http
import pyrox.filtering as http_filtering
from keystoneclient.v2_0 import client
import pynsive
_FTEST_CONFIG_KEY = 'keystone_meniscus_ftest'
class WhenFuncTestingKeystoneMeniscus(unittest.TestCase):
def setUp(self):
self.config = ConfigParser()
self.config.read("examples/config/pyrox.conf")
self.username = self.config.get(_FTEST_CONFIG_KEY, 'username')
self.password = self.config.get(_FTEST_CONFIG_KEY, 'password')
self.tenant_name = self.config.get(_FTEST_CONFIG_KEY, 'tenant_name')
self.auth_url = self.config.get(_FTEST_CONFIG_KEY, 'auth_url')
self.host = self.config.get(_FTEST_CONFIG_KEY, 'host')
self.tenant_id = self.config.get(_FTEST_CONFIG_KEY, 'tenant_id')
plugin_manager = pynsive.PluginManager()
plugin_manager.plug_into('examples/filter')
keystone_filter_plugin = pynsive.import_module(
'keystone_meniscus_example')
self.keystone_filter = keystone_filter_plugin.MeniscusKeystoneFilter()
def test_meniscus_keystone_returns_proxy_action(self):
url = "http://{host}:8080/v1/tenant/{tenant_id}".format(
host=self.host, tenant_id=self.tenant_id)
keystone = client.Client(username=self.username,
password=<PASSWORD>,
tenant_name=self.tenant_name,
auth_url=self.auth_url)
token = keystone.auth_token
req_message = http.HttpRequest()
req_message.url = url
req_message.method = 'GET'
req_message.version = "1.0"
auth_header = req_message.header(name="X-AUTH-TOKEN")
auth_header.values.append(token)
returned_action = self.keystone_filter.on_request(req_message)
self.assertEqual(returned_action.kind, http_filtering.NEXT_FILTER)
def test_meniscus_keystone_returns_reject_action(self):
url = "http://{host}:8080/v1/tenant/{tenant_id}".format(
host=self.host, tenant_id=self.tenant_id)
req_message = http.HttpRequest()
req_message.url = url
req_message.method = 'GET'
req_message.version = "1.0"
auth_header = req_message.header(name="X-AUTH-TOKEN")
auth_header.values.append('BAD_TOKEN')
returned_action = self.keystone_filter.on_request(req_message)
self.assertEqual(returned_action.kind, http_filtering.REJECT)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jonarrien/openstack-xenapi",
"score": 2
} |
#### File: os_xenapi/client/exception.py
```python
from os_xenapi.client.i18n import _
class OsXenApiException(Exception):
"""Base OsXenapi Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
message = self.msg_fmt % kwargs
self.message = message
super(OsXenApiException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
class PluginRetriesExceeded(OsXenApiException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class PluginImageNotFound(OsXenApiException):
msg_fmt = _("Image (%(image_id)s) not found.")
class SessionLoginTimeout(OsXenApiException):
msg_fmt = _("Unable to log in to XenAPI (is the Dom0 disk full?)")
class InvalidImage(OsXenApiException):
msg_fmt = _("Image is invalid: details is - (%(details)s)")
class HostConnectionFailure(OsXenApiException):
msg_fmt = _("Failed connecting to host %(host_netloc)s")
class NotFound(OsXenApiException):
msg_fmt = _("Not found error: %s")
class VdiImportFailure(OsXenApiException):
msg_fmt = _("Failed importing VDI from VHD stream: vdi_ref=(%(vdi_ref)s)")
class VhdDiskTypeNotSupported(OsXenApiException):
msg_fmt = _("Not supported VHD disk type: type=(%(disk_type)s)")
class NoNetworkInterfaceInSameSegment(OsXenApiException):
    msg_fmt = _("Can't find network interface in the same network as "
                "ip=(%(ip)s)")
class ExecuteCommandFailed(OsXenApiException):
msg_fmt = _("Execute command failed: cmd=(%(cmd)s)")
class GetInterfaceOnHIMNMacError(OsXenApiException):
msg_fmt = _("Cannot find eth matches mac: mac=(%(mac)s)")
```
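A hedged usage sketch of the msg_fmt pattern the base-class docstring describes (assumes os_xenapi is importable; the printed values follow from the class definitions above):
```python
from os_xenapi.client import exception
try:
    raise exception.PluginImageNotFound(image_id='42')
except exception.OsXenApiException as e:
    print(e.format_message())  # Image (42) not found.
    print(e.kwargs['code'])    # 500, the inherited default code
```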
#### File: os_xenapi/client/host_agent.py
```python
def version(session, uuid, dom_id, timeout):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout}
return session.call_plugin('agent.py', 'version', args)
def key_init(session, uuid, dom_id, timeout, pub=''):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout,
'pub': pub}
return session.call_plugin('agent.py', 'key_init', args)
def agent_update(session, uuid, dom_id, timeout, url='', md5sum=''):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout,
'url': url, 'md5sum': md5sum}
return session.call_plugin('agent.py', 'agentupdate', args)
def password(session, uuid, dom_id, timeout, enc_pass=''):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout,
'enc_pass': enc_pass}
return session.call_plugin('agent.py', 'password', args)
def inject_file(session, uuid, dom_id, timeout, b64_path='', b64_contents=''):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout,
'b64_path': b64_path, 'b64_contents': b64_contents}
return session.call_plugin('agent.py', 'inject_file', args)
def reset_network(session, uuid, dom_id, timeout):
args = {'id': uuid, 'dom_id': dom_id, 'timeout': timeout}
return session.call_plugin('agent.py', 'resetnetwork', args)
```
#### File: os_xenapi/client/host_management.py
```python
def set_host_enabled(session, enabled):
args = {"enabled": enabled}
return session.call_plugin('xenhost.py', 'set_host_enabled', args)
def get_host_uptime(session):
return session.call_plugin('xenhost.py', 'host_uptime', {})
def get_host_data(session):
return session.call_plugin('xenhost.py', 'host_data', {})
def get_pci_type(session, pci_device):
return session.call_plugin_serialized('xenhost.py', 'get_pci_type',
pci_device)
def get_pci_device_details(session):
return session.call_plugin_serialized('xenhost.py',
'get_pci_device_details')
```
#### File: os_xenapi/client/host_network.py
```python
def ovs_create_port(session, bridge, port, iface_id, mac, status):
args = {'cmd': 'ovs_create_port',
'args': {'bridge': bridge,
'port': port,
'iface-id': iface_id,
'mac': mac,
'status': status}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ovs_add_port(session, bridge, port):
args = {'cmd': 'ovs_add_port',
'args': {'bridge_name': bridge, 'port_name': port}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ovs_del_port(session, bridge, port):
args = {'cmd': 'ovs_del_port',
'args': {'bridge_name': bridge, 'port_name': port}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ovs_del_br(session, bridge_name):
args = {'cmd': 'ovs_del_br',
'args': {'bridge_name': bridge_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_add_if(session, bridge_name, interface_name):
args = {'cmd': 'brctl_add_if',
'args': {'bridge_name': bridge_name,
'interface_name': interface_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_del_if(session, bridge_name, interface_name):
args = {'cmd': 'brctl_del_if',
'args': {'bridge_name': bridge_name,
'interface_name': interface_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_del_br(session, bridge_name):
args = {'cmd': 'brctl_del_br',
'args': {'bridge_name': bridge_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_add_br(session, bridge_name):
args = {'cmd': 'brctl_add_br',
'args': {'bridge_name': bridge_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_set_fd(session, bridge_name, fd):
args = {'cmd': 'brctl_set_fd',
'args': {'bridge_name': bridge_name,
'fd': fd}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def brctl_set_stp(session, bridge_name, stp_opt):
args = {'cmd': 'brctl_set_stp',
'args': {'bridge_name': bridge_name,
'option': stp_opt}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ip_link_add_veth_pair(session, dev1_name, dev2_name):
args = {'cmd': 'ip_link_add_veth_pair',
'args': {'dev1_name': dev1_name,
'dev2_name': dev2_name}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ip_link_del_dev(session, device):
args = {'cmd': 'ip_link_del_dev',
'args': {'device_name': device}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ip_link_get_dev(session, device):
args = {'cmd': 'ip_link_get_dev',
'args': {'device_name': device}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ip_link_set_dev(session, device, option):
args = {'cmd': 'ip_link_set_dev',
'args': {'device_name': device,
'option': option}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def ip_link_set_promisc(session, device, promisc_option):
args = {'cmd': 'ip_link_set_promisc',
'args': {'device_name': device,
'option': promisc_option}
}
session.call_plugin_serialized('xenhost.py', 'network_config', args)
def fetch_all_bandwidth(session):
return session.call_plugin_serialized('bandwidth.py',
'fetch_all_bandwidth')
```
#### File: xapi.d/plugins/kernel.py
```python
import errno
import os
import shutil
import XenAPIPlugin
import dom0_pluginlib
dom0_pluginlib.configure_logging('kernel')
logging = dom0_pluginlib.logging
exists = dom0_pluginlib.exists
optional = dom0_pluginlib.optional
with_vdi_in_dom0 = dom0_pluginlib.with_vdi_in_dom0
KERNEL_DIR = '/boot/guest'
def _copy_vdi(dest, copy_args):
vdi_uuid = copy_args['vdi_uuid']
vdi_size = copy_args['vdi_size']
cached_image = copy_args['cached-image']
logging.debug("copying kernel/ramdisk file from %s to /boot/guest/%s",
dest, vdi_uuid)
filename = KERNEL_DIR + '/' + vdi_uuid
# Make sure KERNEL_DIR exists, otherwise create it
if not os.path.isdir(KERNEL_DIR):
logging.debug("Creating directory %s", KERNEL_DIR)
os.makedirs(KERNEL_DIR)
# Read data from /dev/ and write into a file on /boot/guest
of = open(filename, 'wb')
f = open(dest, 'rb')
# Copy only vdi_size bytes
data = f.read(vdi_size)
of.write(data)
if cached_image:
# Create a cache file. If caching is enabled, kernel images do not have
# to be fetched from glance.
cached_image = KERNEL_DIR + '/' + cached_image
logging.debug("copying kernel/ramdisk file from %s to /boot/guest/%s",
dest, cached_image)
cache_file = open(cached_image, 'wb')
cache_file.write(data)
cache_file.close()
logging.debug("Done. Filename: %s", cached_image)
f.close()
of.close()
logging.debug("Done. Filename: %s", filename)
return filename
def copy_vdi(session, args):
vdi = exists(args, 'vdi-ref')
size = exists(args, 'image-size')
cached_image = optional(args, 'cached-image')
# Use the uuid as a filename
vdi_uuid = session.xenapi.VDI.get_uuid(vdi)
copy_args = {'vdi_uuid': vdi_uuid,
'vdi_size': int(size),
'cached-image': cached_image}
filename = with_vdi_in_dom0(session, vdi, False,
lambda dev:
_copy_vdi('/dev/%s' % dev, copy_args))
return filename
def create_kernel_ramdisk(session, args):
# Creates a copy of the kernel/ramdisk image if it is present in the
# cache. If the image is not present in the cache, it does nothing.
cached_image = exists(args, 'cached-image')
image_uuid = exists(args, 'new-image-uuid')
cached_image_filename = KERNEL_DIR + '/' + cached_image
filename = KERNEL_DIR + '/' + image_uuid
if os.path.isfile(cached_image_filename):
shutil.copyfile(cached_image_filename, filename)
logging.debug("Done. Filename: %s", filename)
else:
filename = ""
logging.debug("Cached kernel/ramdisk image not found")
return filename
def _remove_file(filepath):
try:
os.remove(filepath)
except OSError as exc: # noqa
if exc.errno != errno.ENOENT:
raise
def remove_kernel_ramdisk(session, args):
"""Removes kernel and/or ramdisk from dom0's file system."""
kernel_file = optional(args, 'kernel-file')
ramdisk_file = optional(args, 'ramdisk-file')
if kernel_file:
_remove_file(kernel_file)
if ramdisk_file:
_remove_file(ramdisk_file)
return "ok"
if __name__ == '__main__':
XenAPIPlugin.dispatch({'copy_vdi': copy_vdi,
'create_kernel_ramdisk': create_kernel_ramdisk,
'remove_kernel_ramdisk': remove_kernel_ramdisk})
```
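On the caller side, dom0 plugins such as this one are invoked by file name plus function name with a string-keyed args dict, mirroring the host_agent.py wrappers above. A hedged sketch (the wrapper function is hypothetical; the arg keys match the exists/optional lookups in copy_vdi):
```python
def copy_kernel_vdi(session, vdi_ref, image_size, cached_image=None):
    # Keys mirror exists(args, 'vdi-ref'), exists(args, 'image-size') and
    # optional(args, 'cached-image') in the plugin above.
    args = {'vdi-ref': vdi_ref, 'image-size': str(image_size)}
    if cached_image:
        args['cached-image'] = cached_image
    return session.call_plugin('kernel.py', 'copy_vdi', args)
```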
#### File: xapi.d/plugins/partition_utils.py
```python
from distutils.version import StrictVersion
import logging
import os
import re
import time
import dom0_pluginlib as pluginlib
import utils
pluginlib.configure_logging("disk_utils")
def wait_for_dev(session, dev_path, max_seconds):
for i in range(0, max_seconds):
if os.path.exists(dev_path):
return dev_path
time.sleep(1)
return ""
def _get_sfdisk_version():
out = utils.run_command(['/sbin/sfdisk', '-v'])
if out:
# Return the first two numbers from the version.
# In XS6.5, it's 2.13-pre7. Just return 2.13 for this case.
        pattern = re.compile(r"(\d+)\.(\d+)")
match = pattern.search(out.split('\n')[0])
if match:
return match.group(0)
def make_partition(session, dev, partition_start, partition_end):
    # Since XS7.0, which ships sfdisk v2.23, we observe a bug where sfdisk
    # wrongly calculates cylinders when sectors are specified as the unit
    # (-uS). That bug makes the partition operation fail, and it is fixed
    # in 2.26. So as a workaround, use the '--force' option for versions
    # >=2.23 and <=2.25. '--force' ignores the wrong cylinder value but
    # works as expected.
VER_FORCE_MIN = '2.23'
VER_FORCE_MAX = '2.25'
dev_path = utils.make_dev_path(dev)
if partition_end != "-":
raise pluginlib.PluginError("Can only create unbounded partitions")
sfdisk_ver = _get_sfdisk_version()
cmd_list = ['sfdisk', '-uS', dev_path]
if sfdisk_ver:
if StrictVersion(sfdisk_ver) >= StrictVersion(VER_FORCE_MIN) and \
StrictVersion(sfdisk_ver) <= StrictVersion(VER_FORCE_MAX):
cmd_list = ['sfdisk', '--force', '-uS', dev_path]
utils.run_command(cmd_list, '%s,;\n' % (partition_start))
def _mkfs(fs, path, label):
"""Format a file or block device
:param fs: Filesystem type (only 'swap', 'ext3' supported)
:param path: Path to file or block device to format
:param label: Volume label to use
"""
if fs == 'swap':
args = ['mkswap']
elif fs == 'ext3':
args = ['mkfs', '-t', fs]
# add -F to force no interactive execute on non-block device.
args.extend(['-F'])
if label:
args.extend(['-L', label])
else:
raise pluginlib.PluginError("Partition type %s not supported" % fs)
args.append(path)
utils.run_command(args)
def mkfs(session, dev, partnum, fs_type, fs_label):
dev_path = utils.make_dev_path(dev)
out = utils.run_command(['kpartx', '-avspp', dev_path])
try:
logging.info('kpartx output: %s' % out)
mapperdir = os.path.join('/dev', 'mapper')
dev_base = os.path.basename(dev)
partition_path = os.path.join(mapperdir, "%sp%s" % (dev_base, partnum))
_mkfs(fs_type, partition_path, fs_label)
finally:
# Always remove partitions otherwise we can't unplug the VBD
utils.run_command(['kpartx', '-dvspp', dev_path])
if __name__ == "__main__":
utils.register_plugin_calls(wait_for_dev,
make_partition,
mkfs)
```
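To make the version gating in make_partition concrete, a small sketch of the same comparison in isolation (the helper name and version strings are illustrative):
```python
from distutils.version import StrictVersion
def needs_force_flag(sfdisk_ver):
    # '--force' only for the buggy range [2.23, 2.25]; 2.26 fixed the
    # cylinder calculation and releases before 2.23 never hit the bug.
    if not sfdisk_ver:
        return False
    return (StrictVersion('2.23') <= StrictVersion(sfdisk_ver) <=
            StrictVersion('2.25'))
assert needs_force_flag('2.23') and needs_force_flag('2.25')
assert not (needs_force_flag('2.13') or needs_force_flag('2.26'))
```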
#### File: client/image/test_vdi_handler.py
```python
import mock
import eventlet
from six.moves import http_client as httplib
import tarfile
from os_xenapi.client import exception
from os_xenapi.client.image import vdi_handler
from os_xenapi.client.image import vhd_utils
from os_xenapi.client import utils
from os_xenapi.tests import base
class ImageStreamToVDIsTestCase(base.TestCase):
def setUp(self):
super(ImageStreamToVDIsTestCase, self).setUp()
self.context = mock.Mock()
self.session = mock.Mock()
self.instance = {'name': 'instance-001'}
self.host_url = "http://fake-host.com"
self.sr_ref = "fake-sr-ref"
self.stream = mock.Mock()
@mock.patch.object(tarfile, 'open')
@mock.patch.object(vhd_utils, 'VHDFileParser')
@mock.patch.object(vdi_handler.ImageStreamToVDIs, '_createVDI',
return_value='fake_vdi_ref')
@mock.patch.object(vdi_handler.ImageStreamToVDIs, '_vhd_stream_to_vdi')
def test_start(self, mock_to_vdi, mock_createVDI,
mock_get_parser, mock_open):
self.session.task.create.return_value = 'fake-task-ref'
mock_footer = mock.Mock(current_size=1073741824)
mock_parser = mock.Mock()
mock_get_parser.return_value = mock_parser
mock_parser.parse_vhd_footer.return_value = mock_footer
fake_vhd_info = mock.Mock()
fake_vhd_info.size = 29371904
fake_vhd_info.name = '0.vhd'
mock_tarfile = mock.MagicMock()
mock_tarfile.__enter__.return_value = mock_tarfile
mock_tarfile.__iter__.return_value = [fake_vhd_info]
mock_open.return_value = mock_tarfile
mock_tarfile.extractfile.return_value = 'fake-file-obj'
image_cmd = vdi_handler.ImageStreamToVDIs(self.context, self.session,
self.instance, self.host_url,
self.sr_ref, self.stream)
image_cmd.start()
self.session.task.create.assert_called_once_with(
'VDI_IMPORT_for_instance-001',
'Importing VDI for instance: instance-001')
mock_open.assert_called_once_with(mode="r|gz", fileobj=self.stream)
mock_tarfile.extractfile.assert_called_once_with(fake_vhd_info)
mock_createVDI.assert_called_once_with(self.session, self.instance,
1073741824)
mock_to_vdi.assert_called_once_with(mock_parser, 'fake_vdi_ref',
29371904)
self.session.VDI.get_uuid.assert_called_once_with('fake_vdi_ref')
@mock.patch.object(utils, 'create_vdi',
return_value='fake-vdi-ref')
def test_createVDI(self, mock_create_vdi):
virtual_size = 1073741824
image_cmd = vdi_handler.ImageStreamToVDIs(self.context, self.session,
self.instance, self.host_url,
self.sr_ref, self.stream)
expect_result = 'fake-vdi-ref'
result = image_cmd._createVDI(self.session, self.instance,
virtual_size)
mock_create_vdi.assert_called_once_with(self.session, 'fake-sr-ref',
self.instance, 'instance-001',
'root', virtual_size)
self.session.VDI.get_uuid.assert_called_once_with('fake-vdi-ref')
self.assertEqual(expect_result, result)
@mock.patch.object(utils, 'get_vdi_import_path',
return_value='fake-path')
@mock.patch.object(httplib.HTTPConnection, 'connect')
@mock.patch.object(httplib.HTTPConnection, 'request')
@mock.patch.object(httplib.HTTPConnection, 'send')
@mock.patch.object(httplib.HTTPConnection, 'getresponse')
@mock.patch.object(httplib.HTTPConnection, 'close')
def test_vhd_stream_to_vdi(self, conn_close, conn_getRes, conn_send,
conn_req, conn_connect, get_path):
vdh_stream = mock.Mock()
cache_size = 4 * 1024
        remain_size = vdi_handler.CHUNK_SIZE // 2
file_size = cache_size + vdi_handler.CHUNK_SIZE * 2 + remain_size
headers = {'Content-Type': 'application/octet-stream',
'Content-Length': '%s' % file_size}
image_cmd = vdi_handler.ImageStreamToVDIs(self.context, self.session,
self.instance, self.host_url,
self.sr_ref, self.stream)
mock_parser = mock.Mock()
mock_parser.cached_buff = b'\x00' * cache_size
mock_parser.src_file = vdh_stream
image_cmd.task_ref = 'fake-task-ref'
vdh_stream.read.side_effect = ['chunk1', 'chunk2', 'chunk3']
image_cmd._vhd_stream_to_vdi(mock_parser, 'fake_vdi_ref', file_size)
conn_connect.assert_called_once_with()
get_path.assert_called_once_with(self.session, 'fake-task-ref',
'fake_vdi_ref')
conn_connect.assert_called_once_with()
conn_req.assert_called_once_with('PUT', 'fake-path', headers=headers)
expect_send_calls = [mock.call(mock_parser.cached_buff),
mock.call('chunk1'),
mock.call('chunk2'),
mock.call('chunk3'),
]
conn_send.assert_has_calls(expect_send_calls)
conn_getRes.assert_called_once_with()
conn_close.assert_called_once_with()
@mock.patch.object(utils, 'get_vdi_import_path',
return_value='fake-path')
@mock.patch.object(httplib.HTTPConnection, 'connect')
@mock.patch.object(httplib.HTTPConnection, 'request',
side_effect=Exception)
@mock.patch.object(httplib.HTTPConnection, 'send')
@mock.patch.object(httplib.HTTPConnection, 'getresponse')
@mock.patch.object(httplib.HTTPConnection, 'close')
def test_vhd_stream_to_vdi_put_except(self, conn_close, conn_getRes,
conn_send, conn_req, conn_connect,
get_path):
vdh_stream = mock.Mock()
cache_size = 4 * 1024
remain_size = vdi_handler.CHUNK_SIZE / 2
file_size = cache_size + vdi_handler.CHUNK_SIZE * 2 + remain_size
image_cmd = vdi_handler.ImageStreamToVDIs(self.context, self.session,
self.instance, self.host_url,
self.sr_ref, self.stream)
mock_parser = mock.Mock()
mock_parser.cached_buff = b'\x00' * cache_size
mock_parser.src_file = vdh_stream
image_cmd.task_ref = 'fake-task-ref'
vdh_stream.read.return_value = ['chunk1', 'chunk2', 'chunk3']
self.assertRaises(exception.VdiImportFailure,
image_cmd._vhd_stream_to_vdi, mock_parser,
'fake_vdi_ref', file_size)
@mock.patch.object(utils, 'get_vdi_import_path',
return_value='fake-path')
@mock.patch.object(httplib.HTTPConnection, 'connect',
side_effect=Exception)
@mock.patch.object(httplib.HTTPConnection, 'request')
@mock.patch.object(httplib.HTTPConnection, 'send')
@mock.patch.object(httplib.HTTPConnection, 'getresponse')
@mock.patch.object(httplib.HTTPConnection, 'close')
def test_vhd_stream_to_vdi_conn_except(self, conn_close, conn_getRes,
conn_send, conn_req, conn_connect,
get_path):
vdh_stream = mock.Mock()
cache_size = 4 * 1024
remain_size = vdi_handler.CHUNK_SIZE / 2
file_size = cache_size + vdi_handler.CHUNK_SIZE * 2 + remain_size
image_cmd = vdi_handler.ImageStreamToVDIs(self.context, self.session,
self.instance, self.host_url,
self.sr_ref, self.stream)
mock_parser = mock.Mock()
mock_parser.cached_buff = b'\x00' * cache_size
mock_parser.src_file = vdh_stream
image_cmd.task_ref = 'fake-task-ref'
vdh_stream.read.return_value = ['chunk1', 'chunk2', 'chunk3']
self.assertRaises(exception.HostConnectionFailure,
image_cmd._vhd_stream_to_vdi, mock_parser,
'fake_vdi_ref', file_size)
class GenerateImageStreamTestCase(base.TestCase):
def setUp(self):
super(GenerateImageStreamTestCase, self).setUp()
self.context = mock.Mock()
self.session = mock.Mock()
self.instance = {'name': 'instance-001'}
self.host_url = "http://fake-host.com"
self.stream = mock.Mock()
@mock.patch.object(utils, 'create_pipe')
@mock.patch.object(eventlet.GreenPool, 'spawn')
@mock.patch.object(vdi_handler.GenerateImageStream,
'start_image_stream_generator')
@mock.patch.object(eventlet.GreenPool, 'waitall')
def test_get_image_data(self, mock_waitall, mock_start, mock_spawn,
create_pipe):
mock_tarpipe_out = mock.Mock()
mock_tarpipe_in = mock.Mock()
create_pipe.return_value = (mock_tarpipe_out, mock_tarpipe_in)
image_cmd = vdi_handler.GenerateImageStream(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'])
mock_tarpipe_out.read.side_effect = ['chunk1', 'chunk2', '']
image_chunks = []
for chunk in image_cmd.get_image_data():
image_chunks.append(chunk)
create_pipe.assert_called_once_with()
mock_spawn.assert_called_once_with(mock_start, mock_tarpipe_in)
self.assertEqual(image_chunks, ['chunk1', 'chunk2'])
@mock.patch.object(vdi_handler, 'VdisToTarStream')
def test_start_stream_generator(self, mock_stream):
        # Verify that the specified compress level is used when a
        # compresslevel is passed to GenerateImageStream.
compr_level = 9
mock_stream_obj = mock.Mock()
mock_stream.return_value = mock_stream_obj
generator = vdi_handler.GenerateImageStream(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'], compresslevel=compr_level)
fake_tarpipe_in = mock.Mock()
generator.start_image_stream_generator(fake_tarpipe_in)
mock_stream.assert_called_once_with(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'],
fake_tarpipe_in, compr_level)
mock_stream_obj.start.assert_called_once_with()
fake_tarpipe_in.close.assert_called_once_with()
@mock.patch.object(vdi_handler, 'VdisToTarStream')
def test_start_stream_generator_abnormal_level(self, mock_stream):
        # Verify that vdi_handler.DEFAULT_COMPRESSLEVEL is used when the
        # compresslevel passed to GenerateImageStream is an abnormal
        # value (not in 1 - 9).
compr_level = 10
mock_stream_obj = mock.Mock()
mock_stream.return_value = mock_stream_obj
generator = vdi_handler.GenerateImageStream(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'], compresslevel=compr_level)
fake_tarpipe_in = mock.Mock()
generator.start_image_stream_generator(fake_tarpipe_in)
mock_stream.assert_called_once_with(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'],
fake_tarpipe_in, vdi_handler.DEFAULT_COMPRESSLEVEL)
mock_stream_obj.start.assert_called_once_with()
fake_tarpipe_in.close.assert_called_once_with()
@mock.patch.object(vdi_handler, 'VdisToTarStream')
def test_start_stream_generator_none_level(self, mock_stream):
        # Verify that vdi_handler.DEFAULT_COMPRESSLEVEL is used when no
        # compresslevel is passed to GenerateImageStream.
mock_stream_obj = mock.Mock()
mock_stream.return_value = mock_stream_obj
generator = vdi_handler.GenerateImageStream(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'])
fake_tarpipe_in = mock.Mock()
generator.start_image_stream_generator(fake_tarpipe_in)
mock_stream.assert_called_once_with(
self.context, self.session, self.instance,
self.host_url, ['vdi_uuid'],
fake_tarpipe_in, vdi_handler.DEFAULT_COMPRESSLEVEL)
mock_stream_obj.start.assert_called_once_with()
fake_tarpipe_in.close.assert_called_once_with()
class VdisToTarStreamTestCase(base.TestCase):
def setUp(self):
super(VdisToTarStreamTestCase, self).setUp()
self.context = mock.Mock()
self.session = mock.Mock()
self.instance = {'name': 'instance-001'}
self.host_url = "http://fake-host.com"
self.stream = mock.Mock()
@mock.patch.object(tarfile.TarFile, 'gzopen')
@mock.patch.object(tarfile, 'TarInfo')
@mock.patch.object(vdi_handler.VdisToTarStream, '_connect_request',
return_value='fake-conn-resp')
@mock.patch.object(vhd_utils, 'VHDDynDiskParser')
@mock.patch.object(utils, 'create_pipe')
@mock.patch.object(vdi_handler.VdisToTarStream, 'convert_vhd_to_tar')
@mock.patch.object(eventlet.GreenPool, 'spawn')
@mock.patch.object(vdi_handler.VdisToTarStream, '_vhd_to_pipe')
@mock.patch.object(eventlet.GreenPool, 'waitall')
def test_start(self, mock_waitall, mock_to_pipe, mock_spawn,
mock_convert, mock_pipe, mock_parser,
mock_conn_req, mock_tarinfo, mock_open):
mock_tarfile = mock.MagicMock()
mock_tarfile.__enter__.return_value = mock_tarfile
mock_open.return_value = mock_tarfile
mock_tarinfo.return_value = mock.sentinel.tar_info
self.session.VDI.get_by_uuid.return_value = 'fake-vdi-ref'
mock_dynDisk = mock.Mock()
mock_parser.return_value = mock_dynDisk
mock_dynDisk.get_vhd_file_size.return_value = 29371904
vdi_uuids = ['vdi-uuid']
vhdpipe_in = mock.Mock()
mock_pipe.return_value = ('vhdpipe_out', vhdpipe_in)
compr_level = 5
image_cmd = vdi_handler.VdisToTarStream(
self.context, self.session, self.instance,
self.host_url, vdi_uuids, self.stream, compr_level)
image_cmd.start()
mock_open.assert_called_once_with(name=None, fileobj=self.stream,
mode='w', compresslevel=compr_level)
self.session.VDI.get_by_uuid.assert_called_once_with('vdi-uuid')
mock_conn_req.assert_called_once_with('fake-vdi-ref')
mock_dynDisk.get_vhd_file_size.assert_called_once_with()
mock_pipe.assert_called_once_with()
mock_spawn.assert_called_once_with(mock_convert, 'vhdpipe_out',
mock_tarfile,
mock.sentinel.tar_info)
mock_to_pipe.assert_called_once_with(mock_dynDisk, vhdpipe_in)
        vhdpipe_in.close.assert_called_once_with()
mock_waitall.assert_called_once_with()
class AddVhdToTarTestCase(base.TestCase):
def setUp(self):
super(AddVhdToTarTestCase, self).setUp()
self.context = mock.Mock()
self.session = mock.Mock()
self.instance = {'name': 'instance-001'}
self.host_url = "http://fake-host.com"
self.stream = mock.Mock()
def test_add_stream_to_tar(self):
mock_tar_file = mock.Mock()
mock_tar_info = mock.Mock()
mock_tar_info.size = 8196
mock_tar_info.name = '0.vhd'
image_cmd = vdi_handler.AddVhdToTar(mock_tar_file, mock_tar_info,
'fake-vhdpipe-out')
image_cmd.start()
mock_tar_file.addfile.assert_called_once_with(
mock_tar_info, fileobj='fake-vhdpipe-out')
def test_add_stream_to_tar_IOError(self):
mock_tar_file = mock.Mock()
mock_tar_info = mock.Mock()
mock_tar_info.size = 1024
mock_tar_info.name = '0.vhd'
image_cmd = vdi_handler.AddVhdToTar(mock_tar_file, mock_tar_info,
'fake-vhdpipe-out')
mock_tar_file.addfile.side_effect = IOError
self.assertRaises(IOError, image_cmd.start)
```
#### File: client/image/test_vhd_utils.py
```python
import mock
import struct
from os_xenapi.client import exception as xenapi_except
from os_xenapi.client.image import vhd_utils
from os_xenapi.tests import base
class VhdUtilsTestCase(base.TestCase):
def test_VHDFooter(self):
ONE_GB = 1 * 1024 * 1024 * 1024
TYPE_DYNAMIC = 3
footer_data = b'\x00' * 48 + struct.pack('!Q', ONE_GB) + \
b'\x00' * 4 + \
b'\x00\x00\x00\x03'
vhd_footer = vhd_utils.VHDFooter(footer_data)
self.assertEqual(vhd_footer.raw_data, footer_data)
self.assertEqual(vhd_footer.current_size, ONE_GB)
self.assertEqual(vhd_footer.disk_type, TYPE_DYNAMIC)
def test_VHDDynDiskHdr(self):
BAT_OFFSET = 2048
MAX_BAT_ENTRIES = 512
SIZE_OF_DATA_BLOCK = 2 * 1024 * 1024
        # Construct the DDH (Dynamic Disk Header) fields.
DDH_BAT_OFFSET = struct.pack('!Q', BAT_OFFSET)
DDH_MAX_BAT_ENTRIES = struct.pack('!I', MAX_BAT_ENTRIES)
DDH_BLOCK_SIZE = struct.pack('!I', SIZE_OF_DATA_BLOCK)
ddh_data = b'\x00' * 16 + DDH_BAT_OFFSET + \
b'\x00' * 4 + DDH_MAX_BAT_ENTRIES + \
DDH_BLOCK_SIZE
vhd_dynDiskHdr = vhd_utils.VHDDynDiskHdr(ddh_data)
self.assertEqual(vhd_dynDiskHdr.raw_data, ddh_data)
self.assertEqual(vhd_dynDiskHdr.bat_offset, BAT_OFFSET)
self.assertEqual(vhd_dynDiskHdr.bat_max_entries, MAX_BAT_ENTRIES)
self.assertEqual(vhd_dynDiskHdr.block_size, SIZE_OF_DATA_BLOCK)
def test_VHDBlockAllocTable(self):
MAX_BAT_ENTRIES = 512
        # Construct the BAT (Block Allocation Table).
        # Any entry other than 0xffffffff is a valid BAT entry; leave a few
        # holes on purpose. Here DATA_BAT contains 14 valid entries in the
        # first 16 4-byte units; the 2 holes (0xffffffff) should be
        # ignored.
DATA_BAT = b'\x00\x00\x00\x08\x00\x00\x50\x0d\xff\xff\xff\xff' + \
b'\x00\x00\x10\x09\x00\x00\x20\x0a\x00\x00\x30\x0b' + \
b'\x00\x00\x40\x0c\xff\xff\xff\xff\x00\x00\x60\x0e' + \
b'\x00\x00\x70\x0f\x00\x00\x80\x10\x00\x00\x90\x11' + \
b'\x00\x00\xa0\x12\x00\x00\xb0\x13\x00\x00\xc0\x14' + \
b'\x00\x00\xd0\x15' + \
b'\xff\xff\xff\xff' * (MAX_BAT_ENTRIES - 16)
vhd_blockAllocTable = vhd_utils.VHDBlockAllocTable(DATA_BAT)
self.assertEqual(vhd_blockAllocTable.raw_data, DATA_BAT)
self.assertEqual(vhd_blockAllocTable.num_valid_bat_entries, 14)
class VhdFileParserTestCase(base.TestCase):
def test_get_disk_type_name(self):
        disk_type_val = 3
expect_disk_type_name = 'Dynamic hard disk'
fake_file = 'fake_file'
vhdParser = vhd_utils.VHDFileParser(fake_file)
        disk_type_name = vhdParser.get_disk_type_name(disk_type_val)
self.assertEqual(disk_type_name, expect_disk_type_name)
def test_get_vhd_file_size(self):
vhd_file = mock.Mock()
SIZE_OF_FOOTER = 512
SIZE_OF_DDH = 1024
SIZE_PADDING = 512
MAX_BAT_ENTRIES = 512
SIZE_OF_BAT_ENTRY = 4
SIZE_OF_BITMAP = 512
SIZE_OF_DATA_BLOCK = 2 * 1024 * 1024
VIRTUAL_SIZE = 40 * 1024 * 1024 * 1024
# Make fake data for VHD footer.
DATA_FOOTER = b'\x00' * 48 + struct.pack('!Q', VIRTUAL_SIZE)
        # disk type is 3: dynamic disk.
DATA_FOOTER += b'\x00' * 4 + b'\x00\x00\x00\x03'
# padding bytes
padding_len = SIZE_OF_FOOTER - len(DATA_FOOTER)
DATA_FOOTER += b'\x00' * padding_len
        # Construct the DDH (Dynamic Disk Header) fields.
DDH_BAT_OFFSET = struct.pack('!Q', 2048)
DDH_MAX_BAT_ENTRIES = struct.pack('!I', MAX_BAT_ENTRIES)
DDH_BLOCK_SIZE = struct.pack('!I', SIZE_OF_DATA_BLOCK)
DATA_DDH = b'\x00' * 16 + DDH_BAT_OFFSET
DATA_DDH += b'\x00' * 4 + DDH_MAX_BAT_ENTRIES
DATA_DDH += DDH_BLOCK_SIZE
# padding bytes for DDH
padding_len = SIZE_OF_DDH - len(DATA_DDH)
DATA_DDH += b'\x00' * padding_len
# Construct the padding bytes before the Block Allocation Table.
DATA_PADDING = b'\x00' * SIZE_PADDING
        # Construct the BAT (Block Allocation Table).
        # Any entry other than 0xffffffff is a valid BAT entry; leave a few
        # holes on purpose. Here DATA_BAT contains 14 valid entries in the
        # first 16 4-byte units; the 2 holes (0xffffffff) should be
        # ignored.
DATA_BAT = b'\x00\x00\x00\x08\x00\x00\x50\x0d\xff\xff\xff\xff' + \
b'\x00\x00\x10\x09\x00\x00\x20\x0a\x00\x00\x30\x0b' + \
b'\x00\x00\x40\x0c\xff\xff\xff\xff\x00\x00\x60\x0e' + \
b'\x00\x00\x70\x0f\x00\x00\x80\x10\x00\x00\x90\x11' + \
b'\x00\x00\xa0\x12\x00\x00\xb0\x13\x00\x00\xc0\x14' + \
b'\x00\x00\xd0\x15' + \
b'\xff\xff\xff\xff' * (MAX_BAT_ENTRIES - 16)
expected_size = SIZE_OF_FOOTER * 2 + SIZE_OF_DDH
expected_size += SIZE_PADDING + SIZE_OF_BAT_ENTRY * MAX_BAT_ENTRIES
expected_size += (SIZE_OF_BITMAP + SIZE_OF_DATA_BLOCK) * 14
vhd_file.read.side_effect = [DATA_FOOTER,
DATA_DDH,
DATA_PADDING,
DATA_BAT]
vhd_parser = vhd_utils.VHDDynDiskParser(vhd_file)
vhd_size = vhd_parser.get_vhd_file_size()
read_call_list = vhd_file.read.call_args_list
expected = [mock.call(SIZE_OF_FOOTER),
mock.call(SIZE_OF_DDH),
mock.call(SIZE_PADDING),
mock.call(SIZE_OF_BAT_ENTRY * MAX_BAT_ENTRIES),
]
self.assertEqual(expected, read_call_list)
self.assertEqual(expected_size, vhd_size)
def test_not_dyn_disk_exception(self):
        # If the VHD's disk type is not a dynamic disk, the parser should
        # raise an exception.
SIZE_OF_FOOTER = 512
vhd_file = mock.Mock()
# disk type is 2: fixed disk.
DATA_FOOTER = b'\x00' * 60 + b'\x00\x00\x00\x02'
# padding bytes
padding_len = SIZE_OF_FOOTER - len(DATA_FOOTER)
DATA_FOOTER += b'\x00' * padding_len
vhd_file.read.return_value = DATA_FOOTER
self.assertRaises(xenapi_except.VhdDiskTypeNotSupported,
vhd_utils.VHDDynDiskParser, vhd_file)
```
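The expected_size arithmetic in test_get_vhd_file_size follows directly from the dynamic-VHD layout the parser walks; spelled out with the test's own constants:
```python
footer, ddh, padding = 512, 1024, 512
bat = 4 * 512                            # SIZE_OF_BAT_ENTRY * MAX_BAT_ENTRIES
data = 14 * (512 + 2 * 1024 * 1024)      # valid BAT entries * (bitmap + block)
total = footer * 2 + ddh + padding + bat + data  # leading + trailing footer
print(total)  # 29371904 -- the same size reused by the vdi_handler tests above
```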
#### File: tests/utils/test_conntrack_service.py
```python
import mock
import os
from os_xenapi.tests import base
from os_xenapi.utils import conntrack_service
class XenapiConntrackServiceTestCase(base.TestCase):
@mock.patch.object(os.path, 'dirname')
def test_ensure_conntrack_packages(self, mock_dirname):
client = mock.Mock()
client.ssh.return_value = (0, '/tmp/domu_sh.fake', '')
mock_dirname.return_value = '/fake_dir'
ssh_expect_call = [mock.call("mkdir -p /tmp/domu_sh.fake"),
mock.call("chmod +x /tmp/domu_sh.fake/"
"install_conntrack.sh"),
mock.call("/tmp/domu_sh.fake/install_conntrack.sh"),
mock.call("rm -rf /tmp/domu_sh.fake")]
conntrack_service.ensure_conntrack_packages(client)
client.ssh.assert_has_calls(ssh_expect_call)
client.scp.assert_called_once_with(
'/fake_dir/sh_tools/install_conntrack.sh',
'/tmp/domu_sh.fake/install_conntrack.sh')
@mock.patch.object(os.path, 'dirname')
@mock.patch.object(conntrack_service, 'ensure_conntrack_packages')
def test_enable_conntrack_service(self, mock_ensure_conntrack,
mock_dir_name):
client = mock.Mock()
client.ssh.return_value = (0, '/tmp/domu_sh.fake', '')
mock_dir_name.return_value = '/fake_dir'
ssh_expect_call = [mock.call("mkdir -p /tmp/domu_sh.fake"),
mock.call("chmod +x /tmp/domu_sh.fake/"
"enable_conntrack.sh"),
mock.call("/tmp/domu_sh.fake/enable_conntrack.sh"),
mock.call("rm -rf /tmp/domu_sh.fake")]
conntrack_service.enable_conntrack_service(client)
client.ssh.assert_has_calls(ssh_expect_call)
client.scp.assert_called_once_with(
'/fake_dir/sh_tools/enable_conntrack.sh',
'/tmp/domu_sh.fake/enable_conntrack.sh')
mock_ensure_conntrack.assert_called_once_with(client)
```
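Both tests assert the same remote-execution pattern: copy a shell script over scp, mark it executable, run it, then remove the staging directory. A hedged reconstruction (the helper name is hypothetical; the call sequence matches ssh_expect_call):
```python
def run_remote_script(client, local_path, remote_dir):
    remote_path = '%s/%s' % (remote_dir, local_path.split('/')[-1])
    client.ssh('mkdir -p %s' % remote_dir)
    client.scp(local_path, remote_path)
    client.ssh('chmod +x %s' % remote_path)
    client.ssh(remote_path)
    client.ssh('rm -rf %s' % remote_dir)
```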
#### File: os_xenapi/utils/iptables.py
```python
import sys
from os_xenapi.client import exception
from os_xenapi.utils import common_function
from os_xenapi.utils import himn
from os_xenapi.utils import sshclient
OVS_NATIVE_TCP_PORT = '6640'
VXLAN_UDP_PORT = '4789'
def exit_with_error(err_msg):
sys.stderr.write(err_msg)
sys.exit(1)
def configure_dom0_iptables(client):
xs_chain = 'XenServer-Neutron-INPUT'
    # Check the XenServer-specific chain; create it if it does not exist
ret, out, err = execute_iptables_cmd('filter', '-L', xs_chain,
client=client,
allowed_return_codes=[0, 1])
if ret == 1:
execute_iptables_cmd('filter', '--new', xs_chain, client=client)
rule_spec = ('-j %s' % xs_chain)
execute_iptables_cmd('filter', '-I', 'INPUT', rule_spec=rule_spec,
client=client)
    # Check the XenServer rule for OVS native mode; create it if missing
rule_spec = ('-p tcp -m tcp --dport %s -j ACCEPT'
% OVS_NATIVE_TCP_PORT)
ensure_iptables('filter', xs_chain, rule_spec, client=client)
    # Check the XenServer rule for VXLAN; create it if missing
rule_spec = ('-p udp -m multiport --dport %s -j ACCEPT'
% VXLAN_UDP_PORT)
ensure_iptables('filter', xs_chain, rule_spec, client=client)
# Persist iptables rules
client.ssh('service iptables save')
def configure_himn_forwards(forwarding_interfaces, dom0_himn_ip):
    # enable forwarding; make the change persistent
    common_function.execute(
        'sed', '-i',
        r's/.*net\.ipv4\.ip_forward.*=.*/net.ipv4.ip_forward=1/g',
        '/etc/sysctl.conf')
    # make it take effect now
common_function.execute('sysctl', 'net.ipv4.ip_forward=1')
eth = himn.get_local_himn_eth(dom0_himn_ip)
if not eth:
raise exception.NoNetworkInterfaceInSameSegment(dom0_himn_ip)
for interface in forwarding_interfaces:
# allow traffic from HIMN and forward traffic
rule_spec = '-o ' + interface + ' -j MASQUERADE'
ensure_iptables('nat', 'POSTROUTING', rule_spec)
rule_spec = '-i ' + interface + ' -o ' + eth + ' -m state ' + \
'--state RELATED,ESTABLISHED -j ACCEPT'
ensure_iptables('filter', 'FORWARD', rule_spec)
rule_spec = '-i ' + eth + ' -o ' + interface + ' -j ACCEPT'
ensure_iptables('filter', 'FORWARD', rule_spec)
rule_spec = '-i ' + eth + ' -j ACCEPT'
ensure_iptables('filter', 'INPUT', rule_spec)
execute_iptables_cmd('filter', '-S', 'FORWARD')
execute_iptables_cmd('nat', '-S', 'POSTROUTING')
def ensure_iptables(table, chain, rule_spec, client=None):
ret, _, _ = execute_iptables_cmd(
table, '-C', chain, rule_spec=rule_spec, client=client,
allowed_return_codes=[0, 1])
    # if the return value is 1, the rule does not exist
if ret == 1:
execute_iptables_cmd(table, '-I', chain, rule_spec=rule_spec,
client=client)
def execute_iptables_cmd(table, action, chain, rule_spec=None, client=None,
allowed_return_codes=[0]):
"""This function is used to run iptables command.
Users could run command to configure iptables for remote and local hosts.
If the user want to configure remote host, the session client is needed, or
the command would be run on local host.
:param table: table you want you configure.
:param rule_spec: rule spec you want to apply.
:param client: session client to remote host you want to configure.
:param expect_exception: When you just want to do a rule check, set this
flag to 'True'. Then the reture value would be 'Ture' or 'False'.
:param forwarding_interfaces: network interface list which user want to
forward HIMN packages.
"""
if client:
if not rule_spec:
rule_spec = ''
command = ('iptables -t %(table)s %(action)s %(chain)s %(rule_spec)s'
% {'table': table, 'action': action,
'chain': chain, 'rule_spec': rule_spec})
command = command.strip()
return client.ssh(command, allowed_return_codes=allowed_return_codes)
else:
if rule_spec:
rule_spec = rule_spec.split()
else:
rule_spec = []
command = ['iptables', '-t', table, action, chain] + rule_spec
return common_function.detailed_execute(
*command, allowed_return_codes=allowed_return_codes)
def config_iptables(client, forwarding_interfaces=None):
"""This function is used to configure iptables on a XenServer compute node.
:param client: session client with Dom0
:param forwarding_interfaces: network interface list which user want to
forward HIMN packages.
"""
if forwarding_interfaces:
configure_himn_forwards(forwarding_interfaces, client.ip)
configure_dom0_iptables(client)
if __name__ == '__main__':
if len(sys.argv) != 5:
exit_with_error("Wrong parameters input.")
dom0_himn_ip, user_name, password, forwarding_interfaces = sys.argv[1:]
forwarding_interfaces = forwarding_interfaces.split()
try:
client = sshclient.SSHClient(dom0_himn_ip, user_name, password)
except Exception:
exit_with_error("Create connection failed, ip: %(dom0_himn_ip)s,"
" user_name: %(user_name)s" %
{'dom0_himn_ip': dom0_himn_ip, 'user_name': user_name})
config_iptables(client, forwarding_interfaces)
``` |
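ensure_iptables implements the standard iptables idempotency idiom: '-C' exits 1 when the rule is absent, so the '-I' insert runs at most once. A standalone sketch of the same idiom with subprocess (helper name hypothetical; needs iptables and root privileges):
```python
import subprocess
def ensure_rule(table, chain, rule_spec):
    base = ['iptables', '-t', table]
    rule = rule_spec.split()
    # Exit code 0: rule already exists; 1: rule is missing.
    if subprocess.call(base + ['-C', chain] + rule) == 1:
        subprocess.check_call(base + ['-I', chain] + rule)
# e.g. ensure_rule('filter', 'INPUT', '-p tcp --dport 6640 -j ACCEPT')
```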
{
"source": "JonarsLi/flask-restapi",
"score": 2
} |
#### File: flask-restapi/flask_restapi/commands.py
```python
import click
from flask.cli import AppGroup
from .tool import core, template
api_cli = AppGroup("api")
@api_cli.command("create")
@click.argument("name", required=True, type=str)
def create(name: str):
# Create app
core.create_directory(name)
core.create_file(f"{name}/__init__.py", template.get_init_template(name))
# Create routes
core.create_file(f"{name}/routes.py", template.get_routes_template())
# Create errors
core.create_file(f"{name}/errors.py", template.get_errors_template())
# Create views
core.create_directory(f"{name}/views")
core.create_file(f"{name}/views/__init__.py")
# Create specs
core.create_directory(f"{name}/specs")
core.create_file(f"{name}/specs/__init__.py")
# Create services
core.create_directory(f"{name}/services")
core.create_file(f"{name}/services/__init__.py")
```
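Once Api.init_app registers this AppGroup (core.py below calls app.cli.add_command(api_cli)), the scaffold is driven from the Flask CLI; a comment-only sketch of the resulting layout:
```python
# $ flask api create myapp
# produces:
#   myapp/__init__.py  myapp/routes.py  myapp/errors.py
#   myapp/views/__init__.py  myapp/specs/__init__.py  myapp/services/__init__.py
```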
#### File: flask-restapi/flask_restapi/core.py
```python
import functools
from typing import Any, Dict, Type
from flask import Flask, current_app, make_response, request
from pydantic import BaseModel
from . import commands
from .exceptions import ValidationErrorResponses
from .mixins import HandlerMixin, SpecMixin, AuthMixin
from .spec.core import Spec
from .spec.models import BlueprintMap, TagModel
from .types import RequestParametersType
class Api(SpecMixin, AuthMixin, HandlerMixin):
def __init__(self, app: Flask = None) -> None:
self.spec = Spec()
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app: Flask) -> None:
self.app = app
super().init_app()
self.app.before_first_request(self._register_spec)
with self.app.app_context():
self._register_blueprint()
self._register_handlers()
self.app.cli.add_command(commands.api_cli)
def bp_map(self, blueprint_name: str = None, endpoint_name: str = None):
"""Bind the URL endpoint to the blueprint name.
Args:
blueprint_name (str, optional): Flask blueprint name. Defaults to None.
endpoint_name (str, optional): Flask url endpoint name. Defaults to None.
"""
def decorator(cls):
blueprint_map = BlueprintMap(
endpoint_name=endpoint_name or cls.__name__.lower(), blueprint_name=blueprint_name
)
self.spec.blueprint_maps.append(blueprint_map)
return cls
return decorator
def header(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
tag: Type[TagModel] = None,
summary: str = None,
):
"""Receive request url path.
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
tag (Type[TagModel], optional): List of tags to each API operation. Defaults to None.
summary (str, optional): Override spec summary. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
_summary = summary or func.__doc__ or None
self.spec.store_parameters("header", schema, ep, _method_name, tag, _summary)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
_headers = dict((k.lower(), v) for k, v in request.headers.items())
request.parameters.header = schema(**_headers)
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def path(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
tag: Type[TagModel] = None,
summary: str = None,
):
"""Receive request url path
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
tag (Type[TagModel], optional): List of tags to each API operation. Defaults to None.
summary (str, optional): Override spec summary. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
_summary = summary or func.__doc__ or None
self.spec.store_parameters("path", schema, ep, _method_name, tag, _summary)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
request.parameters.path = schema(**request.view_args)
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def query(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
tag: Type[TagModel] = None,
summary: str = None,
):
"""Receive request query string.
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
tag (Type[TagModel], optional): List of tags to each API operation. Defaults to None.
summary (str, optional): Override spec summary. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
_summary = summary or func.__doc__ or None
self.spec.store_parameters("query", schema, ep, _method_name, tag, _summary)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
req_args = request.args.to_dict(flat=False)
normalize_query = {}
for key, value in req_args.items():
if len(value) > 1:
normalize_query.update({key: value})
else:
normalize_query.update({key: value[0]})
request.parameters.query = schema(**normalize_query)
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def body(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
content_type: list = ["application/json"],
tag: Type[TagModel] = None,
summary: str = None,
):
"""Receive request body.
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
            content_type (list, optional): HTTP content-type. Defaults to ["application/json"].
tag (Type[TagModel], optional): List of tags to each API operation. Defaults to None.
summary (str, optional): Override spec summary. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
_summary = summary or func.__doc__ or None
self.spec.store_body(schema, ep, _method_name, content_type, tag, _summary)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
body: Any = request.get_json()
request.parameters.body = schema(**body)
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def form(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
content_type: list = ["multipart/form-data"],
tag: Type[TagModel] = None,
summary: str = None,
):
"""Receive request form data.
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
            content_type (list, optional): HTTP content-type. Defaults to ["multipart/form-data"].
tag (Type[TagModel], optional): List of tags to each API operation. Defaults to None.
summary (str, optional): Override spec summary. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
_summary = summary or func.__doc__ or None
self.spec.store_body(schema, ep, _method_name, content_type, tag, _summary)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
_form = {}
if request.files.to_dict():
_form.update(request.files.to_dict())
if request.form.to_dict():
_form.update(request.form.to_dict())
request.parameters.form = schema(**_form)
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def auth(self, endpoint: str = None, method_name: str = None):
"""Receive authorization token by headers. This auth decorator will get the Authorization of Flask request.headers and mark the endpoint on the spec as requiring verification.
Args:
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
self.spec.store_auth(ep, _method_name)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
auth_header = request.headers.get("Authorization")
if auth_header is not None:
if "Bearer" in auth_header:
_token = auth_header.split(" ")[1]
request.parameters.auth = _token
else:
request.parameters.auth = auth_header
return current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
return wrapper
return decorator
def response(
self,
schema: Type[BaseModel],
endpoint: str = None,
method_name: str = None,
content_type: list = ["application/json"],
headers: Dict[str, Any] = None,
code: int = 200,
default_validation_error: bool = True,
):
"""Make response schema to spec document and auto converted to dictionary.
Args:
schema (Type[BaseModel]): Models are classes which inherit from `BaseModel`.
endpoint (str, optional): Flask url endpoint name. Defaults to None.
method_name (str, optional): Endpoint method name. Defaults to None.
            content_type (list, optional): HTTP content-type. Defaults to ["application/json"].
headers (Dict[str, Any], optional): Response additional headers. Defaults to None.
code (int, optional): HTTP status code. Defaults to 200.
            default_validation_error (bool, optional): Whether to include the default 422 validation-error response in the spec. Defaults to True.
"""
def decorator(func):
ep = endpoint if endpoint else self._generate_endpoint(func.__qualname__)
_method_name = method_name or func.__name__
self.spec.store_responses(code, schema, ep, _method_name, content_type)
if default_validation_error:
self.spec.store_responses(422, ValidationErrorResponses, ep, _method_name, content_type)
@functools.wraps(func)
def wrapper(func_self=None, *args, **kwargs):
request.parameters = self._get_request_parameters()
result = current_app.ensure_sync(func)(func_self, request.parameters, **kwargs)
if isinstance(result, BaseModel):
response = make_response(result.dict(exclude={"headers"}), code)
else:
response = make_response(result, code)
# Add header from result
if hasattr(result, "headers"):
if isinstance(result.headers, dict):
for key, value in result.headers.items():
response.headers[key] = value
# Add header from decorator
if isinstance(headers, dict):
for key, value in headers.items():
response.headers[key] = value
return response
return wrapper
return decorator
def _get_request_parameters(self) -> RequestParametersType:
if not hasattr(request, "parameters"):
request.parameters = RequestParametersType()
return request.parameters
def _generate_endpoint(self, endpoint: str) -> str:
return endpoint.split(".")[0].lower()
``` |
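A hedged usage sketch of how these decorators compose on a Flask class-based view (the schemas and route are illustrative, and Api is imported from core.py directly since the package's top-level exports are not shown here):
```python
from flask import Flask
from flask.views import MethodView
from pydantic import BaseModel
from flask_restapi.core import Api
class ItemQuery(BaseModel):
    page: int = 1
class ItemResponse(BaseModel):
    page: int
    name: str
app = Flask(__name__)
api = Api(app)
class Item(MethodView):
    @api.query(ItemQuery)
    @api.response(ItemResponse)
    def get(self, parameters):
        # parameters.query is a validated ItemQuery instance
        return ItemResponse(page=parameters.query.page, name="demo")
app.add_url_rule("/items", view_func=Item.as_view("item"))
```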
{
"source": "JonarsLi/sanic-ext",
"score": 3
} |
#### File: extensions/injection/test_dependency.py
```python
from sanic import Request, text
class Foo:
def bar(self):
return "foobar"
def test_dependency_added(app):
foo = Foo()
foobar = Foo()
app.ext.dependency(foo)
app.ext.dependency(foobar, name="something")
assert app.ctx._dependencies.foo is foo
assert app.ctx._dependencies.something is foobar
def test_dependency_injection(app):
foo = Foo()
app.ext.dependency(foo)
@app.get("/getfoo")
async def getfoo(request: Request, foo: Foo):
return text(foo.bar())
_, response = app.test_client.get("/getfoo")
assert response.text == "foobar"
```
#### File: extensions/openapi/test_exclude.py
```python
from sanic import Blueprint, Request, Sanic, text
from sanic_ext.extensions.openapi import openapi
from utils import get_spec
def test_exclude_decorator(app: Sanic):
@app.route("/test0")
@openapi.exclude()
async def handler0(request: Request):
"""
openapi:
---
summary: This is a summary.
"""
return text("ok")
@app.route("/test1")
@openapi.definition(summary="This is a summary.", exclude=True)
async def handler1(request: Request):
return text("ok")
spec = get_spec(app)
paths = spec["paths"]
assert len(paths) == 0
def test_exclude_bp(app: Sanic):
bp1 = Blueprint("blueprint1")
bp2 = Blueprint("blueprint2")
@bp1.route("/op1")
@openapi.summary("handler 1")
async def handler1(request: Request):
return text("bp1, ok")
@bp2.route("/op2")
@openapi.summary("handler 2")
async def handler2(request: Request):
return text("bp2, ok")
app.blueprint(bp1)
app.blueprint(bp2)
openapi.exclude(bp=bp1)
spec = get_spec(app)
paths = spec["paths"]
assert len(paths) == 1
assert "/op2" in paths
assert not "/op1" in paths
assert paths["/op2"]["get"]["summary"] == "handler 2"
```
#### File: extensions/openapi/test_external_docs.py
```python
from sanic import Request, Sanic
from sanic.response import text
from sanic_ext import openapi
from sanic_ext.extensions.openapi.definitions import ExternalDocumentation
from utils import get_spec
def test_external_docs(app: Sanic):
@app.route("/test0")
@openapi.document("http://example.com/more", "Find more info here")
async def handler0(request: Request):
return text("ok")
@app.route("/test1")
@openapi.definition(
document=ExternalDocumentation(
"http://example.com/more", "Find more info here"
)
)
async def handler1(request: Request):
return text("ok")
@app.route("/test2")
@openapi.definition(document="http://example.com/more")
async def handler2(request: Request):
return text("ok")
@app.route("/test3")
async def handler3(request: Request):
"""
openapi:
---
summary: This is a summary.
externalDocs:
description: Find more info here
url: http://example.com/more
"""
return text("ok")
@app.route("/test4")
@openapi.document(
ExternalDocumentation("http://example.com/more", "Find more info here")
)
async def handler4(request: Request):
return text("ok")
spec = get_spec(app)
paths = spec["paths"]
assert len(paths) == 5
for i in range(5):
doc_obj = paths[f"/test{i}"]["get"]["externalDocs"]
assert doc_obj["url"] == "http://example.com/more"
if i != 2:
assert doc_obj["description"] == "Find more info here"
```
#### File: extensions/openapi/test_schema.py
```python
from sys import version_info
from typing import List
import pytest
from sanic_ext.extensions.openapi.types import Schema
@pytest.mark.skipif(version_info < (3, 9), reason="Not needed on 3.8")
def test_schema_list():
class Foo:
list1: List[int]
list2: list[int]
schema = Schema.make(Foo)
assert schema.serialize() == {
"type": "object",
"properties": {
"list1": {
"type": "array",
"items": {"type": "integer", "format": "int32"},
},
"list2": {
"type": "array",
"items": {"type": "integer", "format": "int32"},
},
},
}
``` |
{
"source": "Jonarzz/DotaResponsesRedditBot",
"score": 3
} |
#### File: DotaResponsesRedditBot/parsers/css_parser.py
```python
import json
import re
import requests
from rapidfuzz import process
from config import STYLESHEET_URL, FLAIR_REGEX, USER_AGENT
from util.database.database import db_api
__author__ = 'MePsyDuck'
def populate_heroes():
"""Method to update heroes in the Heroes table with hero names and proper css classes names as
taken from the DotA2 subreddit and hero flair images from the reddit directory.
Uses rapidfuzz for fuzzy matching of hero names to name found in `.flair-name` property in css.
"""
hero_names = db_api.get_all_hero_names()
response = requests.get(STYLESHEET_URL, headers={'User-Agent': USER_AGENT})
r = json.loads(response.text)
stylesheet = r['data']['stylesheet']
r = re.compile(FLAIR_REGEX)
for flair in r.finditer(stylesheet):
flair_css = flair['css_class']
img_path = flair['img_path']
flair_hero = img_path[6:]
match, confidence = process.extractOne(flair_hero, hero_names)
if confidence >= 90:
db_api.update_hero(hero_name=match, img_path=img_path, flair_css=flair_css)
```
#### File: DotaResponsesRedditBot/tests/test_bot.py
```python
import unittest
import config
from bot import account
from bot import worker
__author__ = 'Jonarzz'
__maintainer__ = 'MePsyDuck'
class BotWorkerTest(unittest.TestCase):
"""Class used to test bot worker module.
Inherits from TestCase class of unittest module.
"""
def test_parse_comment(self):
"""Method that tests the process_text method from worker module.
"""
self.assertEqual(worker.process_text(
"That's a great idea!!!"), "that s a great idea")
self.assertEqual(worker.process_text(" WoNdErFuL "), "wonderful")
self.assertEqual(worker.process_text("How are you?"), "how are you")
self.assertEqual(worker.process_text(
"Isn't is good to have quotes? you can add any response in quote and bot would still \n\n> reply to them"),
"reply to them")
self.assertEqual(worker.process_text(
"> multiple quotes \n\n > but reply to \n\n > only first one"), "multiple quotes")
def test_account(self):
"""Method used to test the Reddit instance returned by get_account()
"""
reddit = account.get_account()
self.assertEqual(reddit.user.me(), config.USERNAME)
```
#### File: util/caching/caching.py
```python
from abc import ABC, abstractmethod
__author__ = 'MePsyDuck'
class CacheAPI(ABC):
@abstractmethod
def _exists(self, key):
pass
@abstractmethod
def _set(self, key):
pass
def exists(self, thing_id):
"""Check if Reddit thing (currently comment/submission) is already processed/replied.
If it is not in the cache, it adds the thing_id to cache.
:param thing_id: They id of comment/submission to be cached.
:returns: `True` if replyable exists, else `False`.
"""
if self._exists(thing_id):
return True
else:
self._set(thing_id)
return False
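# Minimal in-memory subclass sketch (hypothetical, for illustration of the template method above):
#   class MemoryCache(CacheAPI):
#       def __init__(self):
#           self._seen = set()
#       def _exists(self, key):
#           return key in self._seen
#       def _set(self, key):
#           self._seen.add(key)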
```
#### File: util/caching/db_cache.py
```python
from util.caching.caching import CacheAPI
from util.database.database import db_api
__author__ = 'MePsyDuck'
class DBCache(CacheAPI):
def _exists(self, key):
"""Method to check if key exists in DB cache.
:param key: The `key` to to be checked in DB cache.
:return: `True` if `key` exist in DB cache.
"""
return db_api.check_if_thing_exists(key)
def _set(self, key):
"""Method to set `key` with `value` in DB cache.
:param key: The `key` (thing_id) to be added to DB cache.
"""
db_api.add_thing_to_cache(key)
```
#### File: DotaResponsesRedditBot/util/response_request.py
```python
from util.str_utils import preprocess_text
import requests
def request_cargo_set(url):
web_request = requests.get(url)
web_json = web_request.json()
cargo_set = set()
for objects in web_json['cargoquery']:
cargo_set.add(preprocess_text(objects['title']['title']))
return cargo_set
``` |
{
"source": "Jonarzz/LastfmLovedToSpotifyPlaylist",
"score": 3
} |
#### File: Jonarzz/LastfmLovedToSpotifyPlaylist/lastfm_to_spotify.py
```python
import sys
import lastfm
import spotify
__author__ = 'Jonasz'
def lastfm_fav_to_spotify_playlist():
"""Main method of the project that brings together other modules that are using APIs."""
(loved_tracks, spotify_username, playlist_name) = extract_variables()
try:
token = spotify.generate_token()
except spotify.TokenGenerationException:
print('Error generating token.') # GUI => dialog window
else:
sp = spotify.create_spotify_object(token)
tracks_ids = spotify.create_spotify_tracks_ids_list_from_loved(loved_tracks, sp)
playlist_id = spotify.create_playlist_for_user(sp, spotify_username, playlist_name)
spotify.add_tracks_to_playlist(sp, spotify_username, playlist_id, tracks_ids)
def extract_variables():
"""Method that extracts variables given as arguments when running the script
and returns:
- loved_tracks (list of dictionaries - read the doc of
lastfm.get_loved_tracks_list method for further information)
- spotify_username (given as argument)
- playlist_name (given as argument)
"""
try:
lastfm_user = lastfm.get_lastfm_user(sys.argv[1], sys.argv[2])
except lastfm.WrongCredentialsException:
print('Wrong LastFM credentials.') # GUI => dialog window
input('Press any key.')
return
loved_tracks = lastfm.get_loved_tracks_list(lastfm_user)
spotify_username = sys.argv[3]
playlist_name = sys.argv[4]
return loved_tracks, spotify_username, playlist_name
if __name__ == '__main__':
lastfm_fav_to_spotify_playlist()
```
#### File: LastfmLovedToSpotifyPlaylist/my_spotipy/util.py
```python
from __future__ import print_function
import webbrowser
from spotipy import oauth2
import spotipy
from callback_server import callback_server
def prompt_for_user_token(scope=None, client_id=None,
client_secret=None, redirect_uri=None):
""" Prompts the user to login if necessary and returns
the user token suitable for use with the spotipy.Spotify
constructor
Parameters:
- scope - the desired scope of the request
- client_id - the client id of your app
- client_secret - the client secret of your app
- redirect_uri - the redirect URI of your app
"""
if not client_id:
raise spotipy.SpotifyException(550, -1, 'No credentials set')
sp_oauth = oauth2.SpotifyOAuth(client_id, client_secret, redirect_uri, scope=scope)
auth_url = sp_oauth.get_authorize_url()
cb_server = callback_server.CallbackServer()
webbrowser.open(auth_url)
cb_server.wait_for_request()
response = cb_server.url
code = sp_oauth.parse_response_code(response)
token_info = sp_oauth.get_access_token(code)
return token_info['access_token']
```
#### File: Jonarzz/LastfmLovedToSpotifyPlaylist/spotify.py
```python
import spotipy
import my_spotipy.util as util
import properties
__author__ = 'Jonarzz'
class TokenGenerationException(Exception):
"""Exception raised when token generation fails
(e.g. wrong username, no access granted from the user)."""
pass
def generate_token():
"""Method that returns a spotipy Spotify API token for a given username, if the user
gave the permission to connect the app with their account.
If the token could not be generated, TokenGenerationException is raised."""
return util.prompt_for_user_token(properties.MODIFY_PLAYLISTS_SCOPE,
properties.SPOTIFY_API_ID,
properties.SPOTIFY_API_SECRET,
properties.SPOTIFY_REDIRECT_URL)
def create_spotify_object(token):
"""Method that returns a spotipy.Spotify object created using given token."""
spotify_obj = spotipy.Spotify(auth=token)
spotify_obj.trace = False
return spotify_obj
def create_spotify_tracks_ids_list_from_loved(loved_tracks, spotify_obj):
"""Method that returns a list of Spotify tracks IDs for a given LastFM loved_tracks
dictionaries list and an authorized spotipy.Spotify object. Prints the progress of creating
the list in percent to the console."""
tracks_ids = []
number_of_loved_tracks = len(loved_tracks)
done_tracks = 0
for track in loved_tracks:
done_tracks += 1
calculate_progress(done_tracks, number_of_loved_tracks, print_progress=True)
search_query = create_search_query(track)
track_id = get_track_id_from_search_query(spotify_obj, search_query, track['artist'])
if track_id:
tracks_ids.append(track_id)
return tracks_ids
def create_playlist_for_user(spotify_obj, spotify_username, playlist_name):
"""Method that creates a playlist with given name for given username, using authorized
spotipy.Spotify object. Created playlist ID is returned."""
playlist = spotify_obj.user_playlist_create(spotify_username, playlist_name)
return playlist['id']
def add_tracks_to_playlist(spotify_obj, spotify_username, playlist_id, tracks_ids,
tracks_per_requests=100):
"""Method that adds tracks with given Spotify tracks IDs to Spotify user's playlist
with a given playlist ID. Spotipy.Spotify object is used to add the tracks.
Maximum tracks per request in Spotify API is 100 and the same number is set in the method
by default. Can be changed to a number below 100."""
results = []
for tracks_chunk in [tracks_ids[i:i + tracks_per_requests] for i in
range(0, len(tracks_ids), tracks_per_requests)]:
results.append(spotify_obj.user_playlist_add_tracks(spotify_username,
playlist_id,
tracks_chunk))
return results
def calculate_progress(done_tracks, number_of_loved_tracks, print_progress=False):
"""Method that calculates and may print (changeable by print argument, False by default)
progress of list creation in XX.XX% format."""
output = '{0:.2f}%'.format(done_tracks / number_of_loved_tracks * 100)
if print_progress:
print(output)
return output
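# e.g. calculate_progress(5, 20) returns '25.00%'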
def create_search_query(track):
"""Method that can be used to create a search query passed to the Spotify API.
The track argument is a dictionary in such format:
{'artist': artist-name, 'title': track-title}"""
artist = track['artist']
title = track['title']
for ending in properties.TITLE_ENDINGS_TO_CUT:
if title.find(ending) != -1:
title = title[:title.find(ending)]
artist = artist.strip()
title = title.strip()
return artist + ' ' + title
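# e.g. assuming ' (Live' is in TITLE_ENDINGS_TO_CUT (its contents are project-specific):
# create_search_query({'artist': 'Foo', 'title': 'Bar (Live at X)'}) returns 'Foo Bar'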
def get_track_id_from_search_query(spotify_obj, search_query, artist_name):
"""Method that returns a track ID returned from the search using Spotify API
(managed by spotipy.Spotify object passed to the method) for a given search query
and expected artist name (or the first result's ID, if the name was not found)."""
results = spotify_obj.search(search_query)
try:
for item in results['tracks']['items']:
if item['artists'][0]['name'] == artist_name:
return item['id']
return results['tracks']['items'][0]['id']
except IndexError:
return None
``` |
{
"source": "Jonas1015/codingpride",
"score": 2
} |
#### File: codingpride/answer/models.py
```python
from uuid import uuid4
from ckeditor_uploader.fields import RichTextUploadingField
from django.conf import settings
from django.db import models
from django.utils.text import slugify
from question.models import *
User = settings.AUTH_USER_MODEL
class Answer(models.Model):
question = models.ForeignKey(
Question, related_name='answers', on_delete=models.CASCADE)
description = RichTextUploadingField(blank=False)
slug = models.SlugField(max_length=250)
date_answered = models.DateTimeField(
auto_now_add=True, verbose_name="date published")
date_updated = models.DateTimeField(
auto_now=True, verbose_name="date updated")
author = models.ForeignKey(
settings.AUTH_USER_MODEL, on_delete=models.SET_NULL,
blank=True, null=True)
def __str__(self):
return self.question.title + " - " + "Answer" + " - " + str(self.id)
def save(self, *args, **kwargs):
self.slug = slugify(
'answer - ' + self.question.title) + "-" + str(uuid4())
return super().save(*args, **kwargs)
``` |
{
"source": "Jonas1015/ShoppySm",
"score": 2
} |
#### File: ShoppySm/accounts/decorators.py
```python
from django.core.exceptions import PermissionDenied
from accounts.models import *
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required, user_passes_test
# ======================== Check shopkeeper role ===============================
s_login_required = user_passes_test(lambda u: u.is_shopkeeper)
def shopkeeper_login_required(view_func):
decorated_view_func = login_required(s_login_required(view_func))
return decorated_view_func
# ============================= Check admin role ================================
a_login_required = user_passes_test(lambda u: u.is_admin)
def admin_login_required(view_func):
decorated_view_func = login_required(a_login_required(view_func))
return decorated_view_func
# ========================= Check Officer role =================================
# o_login_required = user_passes_test(lambda u: True if u.is_officer else False)
#
# def officer_login_required(view_func):
# decorated_view_func = login_required(o_login_required(view_func))
# return decorated_view_func
# ========================= Check Superuser role =================================
super_login_required = user_passes_test(lambda u: u.is_superuser)
def superuser_login_required(view_func):
decorated_view_func = login_required(super_login_required(view_func))
return decorated_view_func
``` |
{
"source": "Jonas1302/python-mattermost-api",
"score": 2
} |
#### File: src/mattermostdriver/driver.py
```python
import asyncio
import logging
import warnings
from .client import Client
from .websocket import Websocket
from .endpoints.brand import Brand
from .endpoints.channels import Channels
from .endpoints.cluster import Cluster
from .endpoints.commands import Commands
from .endpoints.compliance import Compliance
from .endpoints.files import Files
from .endpoints.ldap import LDAP
from .endpoints.oauth import OAuth
from .endpoints.posts import Posts
from .endpoints.preferences import Preferences
from .endpoints.reactions import Reactions
from .endpoints.saml import SAML
from .endpoints.system import System
from .endpoints.teams import Teams
from .endpoints.users import Users
from .endpoints.webhooks import Webhooks
from .endpoints.elasticsearch import Elasticsearch
from .endpoints.emoji import Emoji
from .endpoints.data_retention import DataRetention
from .endpoints.roles import Roles
from .endpoints.status import Status
log = logging.getLogger('mattermostdriver.api')
log.setLevel(logging.INFO)
class Driver:
"""
Contains the client, api and provides you with functions for
login, logout and initializing a websocket connection.
"""
default_options = {
'scheme': 'https',
'url': 'localhost',
'port': 8065,
'basepath': '/api/v4',
'verify': True,
'timeout': 30,
'request_timeout': None,
'login_id': None,
'password': None,
'token': None,
'mfa_token': None,
'auth': None,
'debug': False
}
"""
Required options
- url
Either
- login_id
- password
Or
- token (https://docs.mattermost.com/developer/personal-access-tokens.html)
Optional
- scheme ('https')
- port (8065)
- verify (True)
- timeout (30)
- request_timeout (None)
- mfa_token (None)
- auth (None)
- debug (False)
Should not be changed
- basepath ('/api/v4') - unlikely this would do any good
"""
def __init__(self, options=default_options, client_cls=Client):
"""
:param options: A dict with the values from `default_options`
:type options: dict
"""
if options is None:
options = self.default_options
self.options = self.default_options.copy()
self.options.update(options)
self.driver = self.options
if self.options['debug']:
log.setLevel(logging.DEBUG)
log.warning('Careful!!\nSetting debug to True, will reveal your password in the log output if you do driver.login()!\nThis is NOT for production!')
self.client = client_cls(self.options)
self._api = {
'users': Users(self.client),
'teams': Teams(self.client),
'channels': Channels(self.client),
'posts': Posts(self.client),
'files': Files(self.client),
'preferences': Preferences(self.client),
'status': Status(self.client),
'emoji': Emoji(self.client),
'reactions': Reactions(self.client),
'system': System(self.client),
'webhooks': Webhooks(self.client),
'commands': Commands(self.client),
'compliance': Compliance(self.client),
'cluster': Cluster(self.client),
'brand': Brand(self.client),
'oauth': OAuth(self.client),
'roles': Roles(self.client),
'saml': SAML(self.client),
'ldap': LDAP(self.client),
'elasticsearch': Elasticsearch(self.client),
'data_retention': DataRetention(self.client),
}
self.websocket = None
def init_websocket(self, event_handler, websocket_cls=Websocket):
"""
Will initialize the websocket connection to the mattermost server.
This should be run after login(), because the websocket needs to make
an authentication.
See https://api.mattermost.com/v4/#tag/WebSocket for which
websocket events mattermost sends.
Example of a really simple event_handler function
.. code:: python
@asyncio.coroutine
def my_event_handler(message):
print(message)
:param event_handler: The function to handle the websocket events. Takes one argument.
:type event_handler: Function(message)
:return: The event loop
"""
self.websocket = websocket_cls(self.options, self.client.token)
loop = asyncio.get_event_loop()
loop.run_until_complete(self.websocket.connect(event_handler))
return loop
def login(self):
"""
Logs the user in.
The log in information is saved in the client
- userid
- username
- cookies
:return: The raw response from the request
"""
if self.options['token']:
self.client.token = self.options['token']
result = self.users.get_user('me')
else:
response = self.users.login_user({
'login_id': self.options['login_id'],
'password': self.options['password'],
'token': self.options['mfa_token']
})
if response.status_code == 200:
self.client.token = response.headers['Token']
self.client.cookies = response.cookies
try:
result = response.json()
except ValueError:
log.debug('Could not convert response to json, returning raw response')
result = response
log.debug(result)
if 'id' in result:
self.client.userid = result['id']
if 'username' in result:
self.client.username = result['username']
return result
def logout(self):
"""
Log the user out.
:return: The JSON response from the server
"""
result = self.users.logout_user()
self.client.token = ''
self.client.userid = ''
self.client.username = ''
self.client.cookies = None
return result
@property
def api(self):
"""
.. deprecated:: 4.0.2
Use the endpoints directly instead.
:return: dictionary containing the endpoints
:rtype: dict
"""
warnings.warn('Deprecated for 5.0.0. Use the endpoints directly instead.', DeprecationWarning)
return self._api
@property
def users(self):
"""
Api endpoint for users
:return: Instance of :class:`~endpoints.users.Users`
"""
return Users(self.client)
@property
def teams(self):
"""
Api endpoint for teams
:return: Instance of :class:`~endpoints.teams.Teams`
"""
return Teams(self.client)
@property
def channels(self):
"""
Api endpoint for channels
:return: Instance of :class:`~endpoints.channels.Channels`
"""
return Channels(self.client)
@property
def posts(self):
"""
Api endpoint for posts
:return: Instance of :class:`~endpoints.posts.Posts`
"""
return Posts(self.client)
@property
def files(self):
"""
Api endpoint for files
:return: Instance of :class:`~endpoints.files.Files`
"""
return Files(self.client)
@property
def preferences(self):
"""
Api endpoint for preferences
:return: Instance of :class:`~endpoints.preferences.Preferences`
"""
return Preferences(self.client)
@property
def emoji(self):
"""
Api endpoint for emoji
:return: Instance of :class:`~endpoints.emoji.Emoji`
"""
return Emoji(self.client)
@property
def reactions(self):
"""
Api endpoint for posts' reactions
:return: Instance of :class:`~endpoints.reactions.Reactions`
"""
return Reactions(self.client)
@property
def system(self):
"""
Api endpoint for system
:return: Instance of :class:`~endpoints.system.System`
"""
return System(self.client)
@property
def webhooks(self):
"""
Api endpoint for webhooks
:return: Instance of :class:`~endpoints.webhooks.Webhooks`
"""
return Webhooks(self.client)
@property
def compliance(self):
"""
Api endpoint for compliance
:return: Instance of :class:`~endpoints.compliance.Compliance`
"""
return Compliance(self.client)
@property
def cluster(self):
"""
Api endpoint for cluster
:return: Instance of :class:`~endpoints.cluster.Cluster`
"""
return Cluster(self.client)
@property
def brand(self):
"""
Api endpoint for brand
:return: Instance of :class:`~endpoints.brand.Brand`
"""
return Brand(self.client)
@property
def oauth(self):
"""
Api endpoint for oauth
:return: Instance of :class:`~endpoints.oauth.OAuth`
"""
return OAuth(self.client)
@property
def saml(self):
"""
Api endpoint for saml
:return: Instance of :class:`~endpoints.saml.SAML`
"""
return SAML(self.client)
@property
def ldap(self):
"""
Api endpoint for ldap
:return: Instance of :class:`~endpoints.ldap.LDAP`
"""
return LDAP(self.client)
@property
def elasticsearch(self):
"""
Api endpoint for elasticsearch
:return: Instance of :class:`~endpoints.elasticsearch.Elasticsearch`
"""
return Elasticsearch(self.client)
@property
def data_retention(self):
"""
Api endpoint for data_retention
:return: Instance of :class:`~endpoints.data_retention.DataRetention`
"""
return DataRetention(self.client)
@property
def status(self):
"""
Api endpoint for status
:return: Instance of :class:`~endpoints.status.Status`
"""
return Status(self.client)
@property
def commands(self):
"""
Api endpoint for commands
:return: Instance of :class:`~endpoints.commands.Commands`
"""
return Commands(self.client)
@property
def roles(self):
"""
Api endpoint for roles
:return: Instance of :class:`~endpoints.roles.Roles`
"""
return Roles(self.client)
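# Usage sketch (placeholder url/credentials; see `default_options` above):
#   driver = Driver({'url': 'chat.example.com', 'login_id': 'user', 'password': 'secret'})
#   driver.login()                    # stores token, userid and username on the client
#   me = driver.users.get_user('me')  # endpoints are exposed as properties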
``` |
{
"source": "Jonas164/PSpaMM",
"score": 2
} |
#### File: architectures/arm/generator.py
```python
from codegen.architectures.arm.operands import *
from codegen.generator import *
from codegen.precision import *
from codegen.sugar import *
class Generator(AbstractGenerator):
template = """
void {funcName} (const {real_type}* A, const {real_type}* B, {real_type}* C, {real_type} alpha, {real_type} beta, const {real_type}* prefetch) {{{{
__asm__ __volatile__(
"ldr x0, %0\\n\\t"
"ldr x1, %1\\n\\t"
"ldr x2, %2\\n\\t"
"ldr x3, %3\\n\\t"
"ldr x4, %4\\n\\t"
"dup v0.2d, x3\\n\\t"
"dup v1.2d, x4\\n\\t"
{body_text}
: : "m"(A), "m"(B), "m"(C), "m"(alpha), "m"(beta) : {clobbered});
}}}};"""
def get_v_size(self):
if self.precision == Precision.DOUBLE:
return 2
raise NotImplementedError
def get_template(self):
return Generator.template
def make_reg_blocks(self, bm: int, bn: int, bk: int, v_size: int, nnz: int, m: int, n: int, k: int):
assert (bm % v_size == 0)
vm = bm // v_size
assert ((bn + bk) * vm + bn * bk + 2 <= 32) # Needs to fit in NEON v registers
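# Register budget: A needs vm*bk registers, C needs vm*bn, B needs bn*bk, plus v0/v1
# for alpha/beta -- hence (bn + bk) * vm + bn * bk + 2 <= 32.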
A_regs = Matrix([[v(vm * c + r + 2) for c in range(bk)] for r in range(vm)])
B_regs = Matrix([[v(vm * bk + 2 + bn * r + c) for c in range(bn)] for r in range(bk)])
C_regs = Matrix([[v(32 - vm * bn + vm * c + r) for c in range(bn)]
for r in range(vm)])
alpha_reg = [v(0), v(0)]
beta_reg = [v(1), v(1)]
starting_regs = [r(0), r(1), r(2)]
additional_regs = [r(11), xzr]
loop_reg = r(12)
return A_regs, B_regs, C_regs, starting_regs, alpha_reg, beta_reg, loop_reg, additional_regs
def bcst_alpha_beta(self,
alpha_reg: Register,
beta_reg: Register,
) -> Block:
asm = block("Broadcast alpha and beta so that efficient multiplication is possible")
return asm
def make_scaling_offsets(self,
additional_regs: List[Register],
nnz: int
) -> Block:
asm = block("No register based scaling")
return asm
def make_b_pointers(self,
B_reg: Register,
additional_regs: List[Register],
nnz: int
) -> Block:
asm = block("No register based scaling")
return asm
def move_register_block(self,
cursor: Cursor,
cursor_ptr: CursorLocation,
block_offset: Coords,
registers: Matrix[Register],
v_size: int,
additional_regs,
mask: Matrix[bool] = None,
store: bool = False,
prefetching: str = None,
load_offset: int = 0
) -> Block:
rows, cols = registers.shape
action = "Store" if store else "Load"
asm = block("{} {} register block @ {}".format(action, cursor.name, block_offset))
cur11 = -1000
skipflag = False
for ic in range(cols):
for ir in range(rows):
if skipflag:
skipflag = False
continue
if (mask is None) or (mask[ir, ic]):
cell_offset = Coords(down=ir * v_size, right=ic)
addr, comment = cursor.look(cursor_ptr, block_offset, cell_offset)
addr.disp += 8 * load_offset
next_offset = [0, 0]
if ir + 1 < rows:
next_offset = [1, 0]
elif ic + 1 < cols:
next_offset = [0, 1]
addr_next, comment_next = cursor.look(cursor_ptr, block_offset,
Coords(down=(ir + next_offset[0]) * v_size,
right=ic + next_offset[1]))
addr_next.disp += 8 * load_offset
if addr_next.disp == addr.disp + 8 * v_size:
skipflag = True
if addr.disp > 255:
if (addr.disp - cur11 > 0 and addr.disp - cur11 < 256):
addr.disp = addr.disp - cur11
else:
asm.add(add(addr.disp, additional_regs[0], "", addr.base))
cur11 = addr.disp
addr.disp = 0
addr.base = additional_regs[0]
if not skipflag:
if store:
asm.add(st(registers[ir, ic], addr, True, comment))
else:
asm.add(ld(addr, registers[ir, ic], True, comment))
else:
if store:
asm.add(st(registers[ir, ic], addr, True, comment,
registers[ir + next_offset[0], ic + next_offset[1]]))
else:
asm.add(ld(addr, registers[ir, ic], True, comment,
registers[ir + next_offset[0], ic + next_offset[1]]))
return asm
def make_zero_block(self, registers: Matrix[Register], additional_regs) -> Block:
rows, cols = registers.shape
asm = block("zero registers")
for ic in range(cols):
for ir in range(rows):
asm.add(mov(additional_regs[1], registers[ir, ic], True))
return asm
def make_microkernel(self,
A: Cursor,
B: Cursor,
A_ptr: CursorLocation,
B_ptr: CursorLocation,
A_regs: Matrix[Register],
B_regs,
C_regs: Matrix[Register],
v_size: int,
additional_regs,
to_A_block: Coords = Coords(),
to_B_block: Coords = Coords()
) -> Block:
""" make_microkernel generates a GEMM microkernel for two blocks using the outer-product formulation.
It is responsible for loading and unloading the A block.
It does not assume that the A or B cursors point to the start of the block.
Instead, the coordinates of the start of the block are passed separately.
It does not modify any cursor pointers.
"""
asm = block("Block GEMM microkernel")
bm, bk, aidx, apattern = A.get_block(A_ptr, to_A_block)
bk, bn, bidx, bpattern = B.get_block(B_ptr, to_B_block)
assert (bm % v_size == 0)
mask = sparse_mask(A_regs, A, A_ptr, to_A_block, B, B_ptr, to_B_block, v_size)
asm.add(self.move_register_block(A, A_ptr, to_A_block, A_regs, v_size, additional_regs, mask, store=False))
bs = []
cur11 = -1000
for Vmi in range(bm // v_size):
for bki in range(bk): # inside this k-block
for bni in range(bn): # inside this n-block
to_cell = Coords(down=bki, right=bni)
if B.has_nonzero_cell(B_ptr, to_B_block, to_cell):
B_cell_addr, B_comment = B.look(B_ptr, to_B_block, to_cell)
if B_regs[bki, bni] not in bs:
if B_cell_addr.disp > 255:
if (B_cell_addr.disp - cur11 > 0 and B_cell_addr.disp - cur11 < 256):
B_cell_addr.disp = B_cell_addr.disp - cur11
else:
asm.add(add(B_cell_addr.disp, additional_regs[0], "", B_cell_addr.base))
cur11 = B_cell_addr.disp
B_cell_addr.disp = 0
B_cell_addr.base = additional_regs[0]
asm.add(ld(B_cell_addr, B_regs[bki, bni], True, B_comment))
bs.append(B_regs[bki, bni])
for Vmi in range(bm // v_size):
for bki in range(bk): # inside this k-block
for bni in range(bn): # inside this n-block
to_cell = Coords(down=bki, right=bni)
if B.has_nonzero_cell(B_ptr, to_B_block, to_cell):
B_cell_addr, B_comment = B.look(B_ptr, to_B_block, to_cell)
comment = "C[{}:{},{}] += A[{}:{},{}]*{}".format(Vmi * v_size, Vmi * v_size + v_size, bni,
Vmi * v_size, Vmi * v_size + v_size, bki,
B_comment)
asm.add(fma(B_regs[bki, bni], A_regs[Vmi, bki], C_regs[Vmi, bni], comment=comment))
return asm
def init_prefetching(self, prefetching):
Generator.template = Generator.template.format(prefetching_mov="", prefetching_decl='', funcName="{funcName}",
body_text="{body_text}", clobbered="{clobbered}")
```
#### File: PSpaMM/codegen/generator.py
```python
from cursors import *
from codegen.ast import *
from codegen.precision import *
from abc import ABC, abstractmethod
class AbstractGenerator(ABC):
def __init__(self, precision: Precision):
self.precision = precision
def get_precision(self):
return self.precision
@abstractmethod
def get_v_size(self):
pass
@abstractmethod
def get_template(self):
pass
@abstractmethod
def make_reg_blocks(self, bm:int, bn:int, bk:int, v_size:int, nnz:int, m:int, n:int, k:int):
pass
@abstractmethod
def move_register_block(self,
cursor: Cursor,
cursor_ptr: CursorLocation,
block_offset: Coords,
registers: Matrix[Register],
v_size: int,
additional_regs,
mask: Matrix[bool] = None,
store: bool = False
) -> Block:
pass
@abstractmethod
def make_zero_block(self, registers: Matrix[Register], additional_regs) -> Block:
pass
@abstractmethod
def make_microkernel(self,
A: Cursor,
B: Cursor,
A_ptr: CursorLocation,
B_ptr: CursorLocation,
A_regs: Matrix[Register],
B_regs,
C_regs: Matrix[Register],
v_size:int,
additional_regs,
to_A_block: Coords = Coords(),
to_B_block: Coords = Coords()
) -> Block:
pass
@abstractmethod
def init_prefetching(self, prefetching):
pass
```
#### File: PSpaMM/codegen/precision.py
```python
from enum import Enum
class Precision(Enum):
DOUBLE = 8
SINGLE = 4
@classmethod
def getCType(cls, precision):
ctype = {cls.DOUBLE: 'double', cls.SINGLE: 'float'}
return ctype[precision]
```
#### File: PSpaMM/codegen/visitor.py
```python
from codegen.ast import *
class Visitor:
def visitStmt(self, stmt: GenericStmt) -> None:
raise NotImplementedError()
def visitMov(self, stmt: MovStmt) -> None:
raise NotImplementedError()
def visitLea(self, stmt: LeaStmt) -> None:
raise NotImplementedError()
def visitLoad(self, stmt: LoadStmt) -> None:
raise NotImplementedError()
def visitStore(self, stmt: StoreStmt) -> None:
raise NotImplementedError()
def visitPrefetch(self, stmt: PrefetchStmt) -> None:
raise NotImplementedError()
def visitAdd(self, stmt: AddStmt) -> None:
raise NotImplementedError()
def visitLabel(self, stmt: LabelStmt) -> None:
raise NotImplementedError()
def visitFma(self, stmt: FmaStmt) -> None:
raise NotImplementedError()
def visitMul(self, stmt: MulStmt) -> None:
raise NotImplementedError()
def visitBcst(self, stmt: BcstStmt) -> None:
raise NotImplementedError()
def visitCmp(self, stmt: CmpStmt) -> None:
raise NotImplementedError()
def visitJump(self, stmt: JumpStmt) -> None:
raise NotImplementedError()
def visitData(self, stmt: DataStmt) -> None:
raise NotImplementedError()
def visitBlock(self, stmt: Block) -> None:
raise NotImplementedError()
def visitCommand(self, stmt: Command) -> None:
raise NotImplementedError()
```
#### File: PSpaMM/scripts/max_arm.py
```python
def getBlocksize(m , n, bk):
bm = 2
bn = 1
maxval = 0
for i in range(2, m+1, 2):
for j in range(1, n+1):
if ARM_condition(i, j, bk):
if i*j > maxval:
maxval = i*j
bm = i
bn = j
return (bm, bn)
def ARM_condition(bm, bn, bk):
return (bn+bk) * (bm / 2) + bn + 2 <= 32
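# Example (hypothetical sizes): getBlocksize(8, 8, 1) scans even bm in [2, 8] and bn in [1, 8]
# and returns the pair maximizing bm*bn under ARM_condition (the 32-register NEON budget).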
``` |
{
"source": "Jonas2019/car-accident-analysis",
"score": 2
} |
#### File: car-accident-analysis/ETL/poi_etl.py
```python
import sys
assert sys.version_info >= (3, 5) # make sure we have Python 3.5+
from pyspark.sql import SparkSession, functions
from pyspark.sql.types import *
from data_filtering import *
accidents_schema = StructType([
StructField('ID', StringType()),
StructField('Severity', IntegerType()),
StructField('Start_Time', TimestampType()),
StructField('End_Time', TimestampType()),
StructField('Start_Lat', DoubleType()),
StructField('Start_Lng', DoubleType()),
StructField('End_Lat', DoubleType()),
StructField('End_Lng', DoubleType()),
StructField('Distance(mi)', DoubleType()),
StructField('Description', StringType()),
StructField('Number', StringType()),
StructField('Street', StringType()),
StructField('Side', StringType()),
StructField('City', StringType()),
StructField('County', StringType()),
StructField('State', StringType()),
StructField('Zipcode', StringType()),
StructField('Country', StringType()),
StructField('Timezone', StringType()),
StructField('Airport_Code', StringType()),
StructField('Weather_Timestamp', TimestampType()),
StructField('Temperature(F)', DoubleType()),
StructField('Wind_Chill(F)', DoubleType()),
StructField('Humidity(%)', DoubleType()),
StructField('Pressure(in)', DoubleType()),
StructField('Visibility(mi)', DoubleType()),
StructField('Wind_Direction', StringType()),
StructField('Wind_Speed(mph)', DoubleType()),
StructField('Precipitation(in)', DoubleType()),
StructField('Weather_Condition', StringType()),
StructField('Amenity', BooleanType()),
StructField('Bump', BooleanType()),
StructField('Crossing', BooleanType()),
StructField('Give_Way', BooleanType()),
StructField('Junction', BooleanType()),
StructField('No_Exit', BooleanType()),
StructField('Railway', BooleanType()),
StructField('Roundabout', BooleanType()),
StructField('Station', BooleanType()),
StructField('Stop', BooleanType()),
StructField('Traffic_Calming', BooleanType()),
StructField('Traffic_Signal', BooleanType()),
StructField('Turning_Loop', BooleanType()),
StructField('Sunrise_Sunset', StringType()),
StructField('Civil_Twilight', StringType()),
StructField('Nautical_Twilight', StringType()),
StructField('Astronomical_Twiligh', StringType()),
])
def main():
data = spark.read.csv('Accident_No_NA.csv',
schema=accidents_schema, header=True)
data = dataFiltering(data)
columns = ['ID', 'Severity', 'Visibility(mi)', 'Amenity', 'Bump',
'Crossing', 'Give_Way', 'Junction', 'No_Exit', 'Railway',
'Roundabout', 'Station', 'Stop', 'Traffic_Calming',
'Traffic_Signal', 'Turning_Loop']
POI_df = data.select(columns)
POI_df = POI_df.withColumn('has_POI',
functions.when((
POI_df['Amenity'] | POI_df['Bump'] |
POI_df['Crossing'] | POI_df['Give_Way'] |
POI_df['Junction'] | POI_df['No_Exit'] |
POI_df['Railway'] | POI_df['Roundabout'] |
POI_df['Station'] | POI_df['Stop'] |
POI_df['Traffic_Calming'] |
POI_df['Traffic_Signal'] |
POI_df['Turning_Loop']), True)
.otherwise(False)
).cache()
avg_visi = POI_df.filter(POI_df['Visibility(mi)']>=0.0)
avg_visi = avg_visi.select('Severity', 'Visibility(mi)')
avg_visi = avg_visi.groupBy('Severity').avg('Visibility(mi)')
avg_visi = avg_visi.orderBy('Severity')
avg_visi.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:<EMAIL>.<EMAIL>.<EMAIL>.net/CMPT732.AvgVisi'
).save()
num_acc = POI_df.groupby('has_POI').count()
num_acc.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:<EMAIL>.mongodb.net/CMPT732.numPOI'
).save()
severity_hasPOI = POI_df.filter(POI_df['has_POI'])
severity_hasPOI = severity_hasPOI.select('Severity')
severity_hasPOI = severity_hasPOI.groupby('Severity').count()
severity_hasPOI = severity_hasPOI.orderBy('Severity')
severity_hasPOI.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:cmpt732@<EMAIL>32.jfbfw.mongodb.net/CMPT732.SevHasPOI'
).save()
severity_noPOI = POI_df.filter(~POI_df['has_POI'])
severity_noPOI = severity_noPOI.select('Severity')
severity_noPOI = severity_noPOI.groupby('Severity').count()
severity_noPOI = severity_noPOI.orderBy('Severity')
severity_noPOI.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:cmpt732@<EMAIL>32.jfbfw.mongodb.net/CMPT732.SevNoPOI'
).save()
POI = ['Amenity', 'Bump', 'Crossing', 'Give_Way', 'Junction',
'No_Exit', 'Railway', 'Roundabout', 'Station', 'Stop',
'Traffic_Calming', 'Traffic_Signal', 'Turning_Loop']
severity_POI = POI_df.filter(POI_df['has_POI'])
severity_POI = severity_POI.select(
[POI_df[c].cast(IntegerType())
if c in POI else POI_df[c]
for c in POI_df.columns]
).cache()
count_POI = severity_POI.select(POI)
count_POI = count_POI.groupby().sum()
count_POI.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:<EMAIL>@<EMAIL>.<EMAIL>.mongodb.net/CMPT732.countPOI'
).save()
count_severity = severity_POI.select(['Severity'] + POI)
count_severity = count_severity.groupby('Severity').sum()
count_severity = count_severity.orderBy('Severity')
count_severity.write.format('mongo').mode('overwrite').option(
'spark.mongodb.output.uri',
'mongodb+srv://dbAdmin:cmp<EMAIL>@<EMAIL>2.jfbfw.mongodb.net/CMPT732.countSev'
).save()
if __name__ == '__main__':
spark = SparkSession\
.builder\
.master('local[2]')\
.appName('accidents_etl')\
.config("spark.mongodb.input.uri", 'mongodb+srv://dbAdmin:<EMAIL>@<EMAIL>.jfbfw.mongodb.net/CMPT732.Project')\
.config('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@<EMAIL>32.jfbfw.mongodb.net/CMPT732.Project')\
.config('spark.jars.packages', 'org.mongodb.spark:mongo-spark-connector_2.12:3.0.1')\
.getOrCreate()
assert spark.version >= '3.0' # make sure we have Spark 3.0+
spark.sparkContext.setLogLevel('WARN')
sc = spark.sparkContext
main()
```
#### File: car-accident-analysis/ETL/test.py
```python
import pyspark
from pyspark.sql import SparkSession
from pyspark.sql.types import *
from pyspark.sql.functions import *
# This is our first testing exploration of the data and possible implementations
# Configure spark session
spark = SparkSession\
.builder\
.master('local[2]')\
.appName('accidents_etl')\
.config("spark.mongodb.input.uri", 'mongodb://127.0.0.1/Accident.us_accidents?readPreference=primaryPreferred')\
.config('spark.mongodb.output.uri', 'mongodb://127.0.0.1/Accident.us_accidents')\
.config('spark.jars.packages', 'org.mongodb.spark:mongo-spark-connector_2.12:3.0.1')\
.getOrCreate()
accidents_schema = StructType([
StructField('ID', StringType()),
StructField('Severity', DoubleType()),
StructField('Start_Time', StringType()),
StructField('End_Time', StringType()),
StructField('Start_Lat', DoubleType()),
StructField('Start_Lng', DoubleType()),
StructField('End_Lat', DoubleType()),
StructField('End_Lng', DoubleType()),
StructField('Distance(mi)', DoubleType()),
StructField('Description', StringType()),
StructField('Number', DoubleType()),
StructField('Street', StringType()),
StructField('Side', StringType()),
StructField('City', StringType()),
StructField('County', StringType()),
StructField('State', StringType()),
StructField('Zipcode', StringType()),
StructField('Country', StringType()),
StructField('Timezone', StringType()),
StructField('Airport_Code', StringType()),
StructField('Weather_Timestamp', StringType()),
StructField('Temperature(F)', DoubleType()),
StructField('Wind_Chill(F)', DoubleType()),
StructField('Humidity(%)', DoubleType()),
StructField('Pressure(in)', DoubleType()),
StructField('Visibility(mi)', DoubleType()),
StructField('Wind_Direction', StringType()),
StructField('Wind_Speed(mph)', DoubleType()),
StructField('Precipitation(in)', DoubleType()),
StructField('Weather_Condition', StringType()),
StructField('Amenity', StringType()),
StructField('Bump', StringType()),
StructField('Crossing', StringType()),
StructField('Give_Way', StringType()),
StructField('Junction', StringType()),
StructField('No_Exit', StringType()),
StructField('Railway', StringType()),
StructField('Roundabout', StringType()),
StructField('Station', StringType()),
StructField('Stop', StringType()),
StructField('Traffic_Calming', StringType()),
StructField('Traffic_Signal', StringType()),
StructField('Turning_Loop', StringType()),
StructField('Sunrise_Sunset', StringType()),
StructField('Civil_Twilight', StringType()),
StructField('Nautical_Twilight', StringType()),
StructField('Astronomical_Twiligh', StringType()),
])
# Load the dataset
df_load = spark.read.csv(r"Accident_No_NA.csv", schema=accidents_schema)
# Drop fields we don't need from df_load
lst_dropped_columns = ['ID','Description','Turning_Loop','Country','Weather_Timestamp','Number','Wind_Chill(F)']
df_load = df_load.drop(*lst_dropped_columns).cache()
# Preview df_load
df_load.show(5)
#df_clean1 = df_load.select('Wind_Direction').distinct()
#print(df_load.collect())
df_load = df_load.withColumn('Wind_Direction', when((df_load['Wind_Direction'] == 'WSW') | (df_load['Wind_Direction'] == 'WNW') | (df_load['Wind_Direction'] == 'W'), 'West')
.when((df_load['Wind_Direction'] == 'SSW') | (df_load['Wind_Direction'] == 'SSE') | (df_load['Wind_Direction'] == 'SW') | (df_load['Wind_Direction'] == 'S') | (df_load['Wind_Direction'] == 'SE'), 'South')
.when((df_load['Wind_Direction'] == 'NNW') | (df_load['Wind_Direction'] == 'NNE') | (df_load['Wind_Direction'] == 'NW') | (df_load['Wind_Direction'] == 'NE') | (df_load['Wind_Direction'] == 'N'), 'North')
.when((df_load['Wind_Direction'] == 'ESE') | (df_load['Wind_Direction'] == 'ENE') | (df_load['Wind_Direction'] == 'E'), 'East')
.when(df_load['Wind_Direction'] == 'CALM', 'Calm')
.when(df_load['Wind_Direction'] == 'VAR', 'Variable')
.otherwise(df_load['Wind_Direction']))
#df_load = df_load.select('Weather_Condition').distinct()
#print(df_load.collect())
df_load = df_load.withColumn('Weather_Condition', when(df_load['Weather_Condition'].rlike('Fog|Overcast|Haze|Mist|Smoke'), 'Fog')
.when(df_load['Weather_Condition'].rlike('Clear|Fair'), 'Clear')
.when(df_load['Weather_Condition'].rlike('Rain|Showers|Drizzle|Thunder'), 'Rain')
.when(df_load['Weather_Condition'].rlike('Ice|Snow|Sleet|Hail'), 'Snow')
.when(df_load['Weather_Condition'].rlike('Storm|storm|Tornado'), 'Storm')
.when(df_load['Weather_Condition'].rlike('Sand|Dust'), 'Sand')
.when(df_load['Weather_Condition'].rlike('Cloudy|Clouds|Cloud'), 'Cloudy')
.otherwise('Other'))
# Create a year field and add it to the dataframe
df_load = df_load.withColumn('Year', year(to_timestamp('Start_Time')))
df_load.show(5)
# Build the accidents frequency dataframe using the year field and counts for each year
df_accidents_freq = df_load.groupBy('Year').count().withColumnRenamed('count', 'Counts').sort('Year')
df_accidents_freq.show(5)
# Write df_quake_freq to mongodb
df_accidents_freq.write.format('mongo')\
.mode('overwrite')\
.option('spark.mongodb.output.uri', 'mongodb://127.0.0.1:27017/Accident.us_accidents').save()
"""
Section: Data visualization
"""
import pandas as pd
from bokeh.io import output_notebook, output_file
from bokeh.plotting import figure, show, ColumnDataSource
from bokeh.models.tools import HoverTool
import math
from math import pi
from bokeh.palettes import Category20c
from bokeh.transform import cumsum
from bokeh.tile_providers import CARTODBPOSITRON
from bokeh.themes import built_in_themes
from bokeh.io import curdoc
from pymongo import MongoClient
# Create a custom read function to read data from mongodb into a dataframe
def read_mongo(host='127.0.0.1', port=27017, username=None, password=None, db='Quake', collection='pred_results'):
mongo_uri = 'mongodb://{}:{}/{}.{}'.format(host, port, db, collection)
# Connect to mongodb
conn = MongoClient(mongo_uri)
db = conn[db]
# Select all records from the collection
cursor = db[collection].find()
# Create the dataframe
df = pd.DataFrame(list(cursor))
# Delete the _id field
del df['_id']
return df
# Load the datasets from mongodb
df_quakes = read_mongo(collection='quakes')
df_quake_freq = read_mongo(collection='quake_freq')
df_quake_pred = read_mongo(collection='pred_results')
df_quakes_2016 = df_quakes[df_quakes['Year'] == 2016]
# Preview df_quakes_2016
df_quakes_2016.head()
# Show plots embedded in jupyter notebook
output_notebook()
# Create custom style function to style our plots
def style(p):
# Title
p.title.align = 'center'
p.title.text_font_size = '20pt'
p.title.text_font = 'serif'
# Axis titles
p.xaxis.axis_label_text_font_size = '14pt'
p.xaxis.axis_label_text_font_style = 'bold'
p.yaxis.axis_label_text_font_size = '14pt'
p.yaxis.axis_label_text_font_style = 'bold'
# Tick labels
p.xaxis.major_label_text_font_size = '12pt'
p.yaxis.major_label_text_font_size = '12pt'
# Plot the legend in the top left corner
p.legend.location = 'top_left'
return p
# Create the Geo Map plot
def plotMap():
lat = df_quakes_2016['Latitude'].values.tolist()
lon = df_quakes_2016['Longitude'].values.tolist()
pred_lat = df_quake_pred['Latitude'].values.tolist()
pred_lon = df_quake_pred['Longitude'].values.tolist()
lst_lat = []
lst_lon = []
lst_pred_lat = []
lst_pred_lon = []
i = 0
j = 0
# Convert Lat and Long values into merc_projection format
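# Web Mercator: x = R * lon_rad and y = R * ln(tan(pi/4 + lat_rad/2)); the scale factor
# below (x / lon = R * pi / 180) makes the y formula algebraically equivalent.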
for i in range(len(lon)):
r_major = 6378137.000
x = r_major * math.radians(lon[i])
scale = x / lon[i]
y = 180.0 / math.pi * math.log(math.tan(math.pi / 4.0 +
lat[i] * (math.pi / 180.0) / 2.0)) * scale
lst_lon.append(x)
lst_lat.append(y)
i += 1
# Convert predicted lat and long values into merc_projection format
for j in range(len(pred_lon)):
r_major = 6378137.000
x = r_major * math.radians(pred_lon[j])
scale = x / pred_lon[j]
y = 180.0 / math.pi * math.log(math.tan(math.pi / 4.0 +
pred_lat[j] * (math.pi / 180.0) / 2.0)) * scale
lst_pred_lon.append(x)
lst_pred_lat.append(y)
j += 1
df_quakes_2016['coords_x'] = lst_lat
df_quakes_2016['coords_y'] = lst_lon
df_quake_pred['coords_x'] = lst_pred_lat
df_quake_pred['coords_y'] = lst_pred_lon
# Scale the circles
df_quakes_2016['Mag_Size'] = df_quakes_2016['Magnitude'] * 4
df_quake_pred['Mag_Size'] = df_quake_pred['Pred_Magnitude'] * 4
# create datasources for our ColumnDataSource object
lats = df_quakes_2016['coords_x'].tolist()
longs = df_quakes_2016['coords_y'].tolist()
mags = df_quakes_2016['Magnitude'].tolist()
years = df_quakes_2016['Year'].tolist()
mag_size = df_quakes_2016['Mag_Size'].tolist()
pred_lats = df_quake_pred['coords_x'].tolist()
pred_longs = df_quake_pred['coords_y'].tolist()
pred_mags = df_quake_pred['Pred_Magnitude'].tolist()
pred_year = df_quake_pred['Year'].tolist()
pred_mag_size = df_quake_pred['Mag_Size'].tolist()
# Create column datasource
cds = ColumnDataSource(
data=dict(
lat=lats,
lon=longs,
mag=mags,
year=years,
mag_s=mag_size
)
)
pred_cds = ColumnDataSource(
data=dict(
pred_lat=pred_lats,
pred_long=pred_longs,
pred_mag=pred_mags,
year=pred_year,
pred_mag_s=pred_mag_size
)
)
# Tooltips
TOOLTIPS = [
("Year", " @year"),
("Magnitude", " @mag"),
("Predicted Magnitude", " @pred_mag")
]
# Create figure
p = figure(title='Earthquake Map',
plot_width=2300, plot_height=450,
x_range=(-2000000, 6000000),
y_range=(-1000000, 7000000),
tooltips=TOOLTIPS)
p.circle(x='lon', y='lat', size='mag_s', fill_color='#cc0000', fill_alpha=0.7,
source=cds, legend='Quakes 2016')
# Add circles for our predicted earthquakes
p.circle(x='pred_long', y='pred_lat', size='pred_mag_s', fill_color='#ccff33', fill_alpha=0.7,
source=pred_cds, legend='Predicted Quakes 2017')
p.add_tile(CARTODBPOSITRON)
# Style the map plot
# Title
p.title.align = 'center'
p.title.text_font_size = '20pt'
p.title.text_font = 'serif'
# Legend
p.legend.location = 'bottom_right'
p.legend.background_fill_color = 'black'
p.legend.background_fill_alpha = 0.8
p.legend.click_policy = 'hide'
p.legend.label_text_color = 'white'
p.xaxis.visible = False
p.yaxis.visible = False
p.axis.axis_label = None
p.axis.visible = False
p.grid.grid_line_color = None
# show(p)
return p
# plotMap()
# Create the Bar Chart
def plotBar():
# Load the datasource
cds = ColumnDataSource(data=dict(
yrs=df_quake_freq['Year'].values.tolist(),
numQuakes=df_quake_freq['Counts'].values.tolist()
))
# Tooltip
TOOLTIPS = [
('Year', ' @yrs'),
('Number of earthquakes', ' @numQuakes')
]
# Create a figure
barChart = figure(title='Frequency of Earthquakes by Year',
plot_height=400,
plot_width=1150,
x_axis_label='Years',
y_axis_label='Number of Occurances',
x_minor_ticks=2,
y_range=(0, df_quake_freq['Counts'].max() + 100),
toolbar_location=None,
tooltips=TOOLTIPS)
# Create a vertical bar
barChart.vbar(x='yrs', bottom=0, top='numQuakes',
color='#cc0000', width=0.75,
legend='Year', source=cds)
# Style the bar chart
barChart = style(barChart)
# show(barChart)
return barChart
# plotBar()
# Create a magnitude plot
def plotMagnitude():
# Load the datasource
cds = ColumnDataSource(data=dict(
yrs=df_quake_freq['Year'].values.tolist(),
avg_mag=df_quake_freq['Avg_Magnitude'].round(1).values.tolist(),
max_mag=df_quake_freq['Max_Magnitude'].values.tolist()
))
# Tooltip
TOOLTIPS = [
('Year', ' @yrs'),
('Average Magnitude', ' @avg_mag'),
('Maximum Magnitude', ' @max_mag')
]
# Create the figure
mp = figure(title='Maximum and Average Magnitude by Year',
plot_width=1150, plot_height=400,
x_axis_label='Years',
y_axis_label='Magnitude',
x_minor_ticks=2,
y_range=(5, df_quake_freq['Max_Magnitude'].max() + 1),
toolbar_location=None,
tooltips=TOOLTIPS)
# Max Magnitude
mp.line(x='yrs', y='max_mag', color='#cc0000', line_width=2, legend='Max Magnitude', source=cds)
mp.circle(x='yrs', y='max_mag', color='#cc0000', size=8, fill_color='#cc0000', source=cds)
# Average Magnitude
mp.line(x='yrs', y='avg_mag', color='yellow', line_width=2, legend='Avg Magnitude', source=cds)
mp.circle(x='yrs', y='avg_mag', color='yellow', size=8, fill_color='yellow', source=cds)
mp = style(mp)
# show(mp)
return mp
# plotMagnitude()
# Display the visuals directly in the browser
output_file('dashboard.html')
# Change to a dark theme
curdoc().theme = 'dark_minimal'
# Build the grid plot
from bokeh.layouts import gridplot
# Make the grid
grid = gridplot([[plotMap()], [plotBar(), plotMagnitude()]])
# Show the grid
show(grid)
``` |
{
"source": "Jonas231/OpticalDesignDocu_o",
"score": 3
} |
#### File: _build/jupyter_execute/KDP-2_spot_diagram_matplotlib.py
```python
import numpy as np
def built_spd_mac(file, a, b):
macroname = 'buildSPD'
file = open(file, "w")
def write_line(file, L):
file.writelines(L)
file.write("\n")
#write_line(file, L=['MACRO ' + str(macroname)])
# FOB ... object point specification, manual p. 159
# This command defines the object point from which subsequent rays will be traced
# FOB (qualifier word) , Y, X, Z, n, m
if isinstance(a, list) and isinstance(b, list):
#filename = "multi_fob_"+str(a[0])+"_to_"+str(a[-1])+"__"+str(b[0])+"_to_"+str(b[-1])
for i in range(0, len(a)):
filename = str(a[i]) + '_' + str(b[i])
# for one object point specification (field)
write_line(file, L=['VIG ON'])
write_line(file, L=['FOB, ' + str(a[i]) + ' ' + str(b[i])])
write_line(file, L=['SPD'])
# create an ascii file with the current spd
write_line(file, L=['AWRTSPOT spd_' + filename])
else:
filename = str(a)+'_'+str(b)
# for one object point specification (field)
write_line(file, L=['FOB, ' + str(a) + ' '+ str(b)])
write_line(file, L=['SPD'])
# create an ascii file with the current spd
write_line(file, L=['AWRTSPOT spd_'+filename])
#write_line(file, L=['EOM'])
file.close()
path_KDP_mac_spd = r'C:\Work\Tools\KDP\create_spd.DAT'
path_KDP_mac_spd_multi = r'C:\Work\Tools\KDP\create_spd_multi.DAT'
built_spd_mac(path_KDP_mac_spd, a = 0.1, b = 0.1)
a = np.linspace(0, 0.1, num = 11)
b = np.linspace(0, 0.1, num = 11)
A,B = np.meshgrid(a,b)
built_spd_mac(path_KDP_mac_spd_multi, a = A.flatten().tolist(), b = B.flatten().tolist())
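# For a single object point (a = 0.1, b = 0.1) the generated macro reads:
#   FOB, 0.1 0.1
#   SPD
#   AWRTSPOT spd_0.1_0.1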
# Now go to KDP-2 and execute the created macro with the command "input file create_spd.DAT" in the command line.
# ## Open and plot with python
# Use the below python function to open and plot the .ASC file created by the KDP-2 "macro", i.e. list of commands:
#
# In[2]:
import os
filename = os.path.join(os.path.dirname(path_KDP_mac_spd_multi), "SPD_0.1_.ASC")
#filename = os.path.join(os.path.dirname(path_KDP_mac_spd_multi), "SPD_0.01.ASC")
def load_spd(filename):
N_rays = int(np.loadtxt(filename, skiprows=0, max_rows = 1))-1
spd_L = []
for n in range(0, N_rays):
#print("n = ", n)
spd = np.loadtxt(filename, skiprows=1+n*25, max_rows=25, dtype='str')
spd = np.char.replace(spd, 'D', 'E').astype(np.float64)
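# KDP-2 writes Fortran-style exponents (e.g. 1.0D-03); replacing 'D' with 'E' lets numpy parse them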
spd_L.append(spd)
spd_L = np.array(spd_L)
# x,y,z, l, m, n and wavelength number of chief ray at object surface
# see KDP-2 manual, p. 178
chief_ray_info = np.loadtxt(filename, skiprows = 1+n*25 + 25, max_rows = 6, dtype = 'str')
chief_ray_info = np.char.replace(chief_ray_info , 'D', 'E').astype(np.float64)
return chief_ray_info, spd_L
chief_ray_info, spd_L = load_spd(filename)
print(spd_L.shape)
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rcParams['mathtext.default'] = 'regular'
plt.close("all")
get_ipython().run_line_magic('matplotlib', 'widget')
def plot_spot_diag(spd_L, unit = "mum"):
#unit = "mm"
if unit == "mum":
unit = "$\mu$m"
if unit == "mm":
fac = 1
if unit == "$\mu$m":
fac = 1000
fig = plt.figure("spd", figsize =(8,8))
ax = plt.subplot(111)
ax.title.set_text('spot diagram')
ax.set_xlabel('x-dist. / '+str(unit))
ax.set_ylabel('y-dist. / '+str(unit))
plt.minorticks_on()
plt.grid(which='major', axis='both', color = 'lightgray', linestyle = ':')
plt.grid(which='minor', axis='both', color = 'lightgray', linestyle = ':')
plt.subplots_adjust(left=0.25, bottom=0.25, right=None, top=None, wspace=None, hspace=None)
def centroidnp(arr):
length = arr.shape[0]
sum_x = np.nansum(arr[:, 0])
sum_y = np.nansum(arr[:, 1])
return sum_x/length, sum_y/length
ax.tick_params(axis='both', which = 'both', direction = 'in')
points_xy = spd_L[:,0,0:2] # 0 selects x and y, 0:2 keeps both
points_xy = points_xy[points_xy[:,0] != 0] #= np.nan
centroid = centroidnp(points_xy)
points_xc = points_xy[:,0]-centroid[0]
points_yc = points_xy[:,1]-centroid[1]
ax.scatter(points_xc*fac, points_yc*fac, marker = '+', color = 'blue', linewidth = 0.5)
#print("x = ", points_xc, "y = ", points_yc )
#points_xc_L.append(points_xc)
#points_yc_L.append(points_yc)
centroidc = (0,0)
print("centroid: ", centroid)
ax.axhline(y=centroidc[1]*fac, color='green', linestyle='--', linewidth = 0.5)
ax.axvline(x=centroidc[0]*fac, color='green', linestyle='--', linewidth = 0.5)
Maxx = np.abs(np.max(points_xc*fac))
Minx = np.abs(np.min(points_xc*fac))
Limx = np.max([Maxx,Minx])
Max = np.abs(np.max(points_yc*fac))
Min = np.abs(np.min(points_yc*fac))
Limy = np.max([Max,Min])
Lim = np.max([Limx,Limy])
#ax.set_xlim(-Lim,Lim)
#ax.set_ylim(-Lim,Lim)
ax.annotate(str(np.round(centroid,decimals = 3))+" mm", (centroidc[0]+Lim/20, centroidc[1]+Lim/20),
fontsize = 8, color = 'green')
start = -np.floor(Lim*10)/10
end = -start
stepsize = end/2
#ax.xaxis.set_ticks(np.arange(start, end+stepsize, stepsize))
#ax.yaxis.set_ticks(np.arange(start, end+stepsize, stepsize))
return centroid, points_xc, points_yc
print(chief_ray_info)
print(spd_L.shape)
# Plotting with matplotlib:
# In[3]:
centroid, points_xc, points_yc = plot_spot_diag(spd_L, unit = "mm")
print("centroid = ", centroid)
# Using plotly for plotting:
# In[4]:
import plotly.io as pio
import plotly.express as px
import plotly.offline as py
# In[5]:
#df = px.data.iris()  # unused example data, kept for reference
fig = px.scatter(x = points_xc, y = points_yc, labels={"x": "x-dist. / mm","y": "y-dist. / mm"}, template='plotly')
fig.update_yaxes(scaleanchor = "x",scaleratio = 1,)
fig
# In[ ]:
# In[ ]:
```
#### File: Jonas231/OpticalDesignDocu_o/KDP_parser.py
```python
import time
import math
start = time.time()
import numpy as np
def search_string_in_file(file_name, string_to_search, not_string = "!"):
"""Search for the given string in file and return lines containing that string,
along with line numbers"""
# see: https://thispointer.com/python-search-strings-in-a-file-and-get-line-numbers-of-lines-containing-the-string/
line_number = 0
list_of_results = []
# Open the file in read only mode
with open(file_name, 'r') as read_obj:
# Read all lines in the file one by one
for line in read_obj:
# For each line, check if line contains the string
line_number += 1
if string_to_search in line and (not_string not in line):
# If yes, then add the line number & line as a tuple in the list
list_of_results.append((line_number, line.rstrip()))
# Return list of tuples containing line numbers and lines where string is found
# L = np.array(list_of_results)
return read_obj, np.array(list_of_results)  # note: read_obj is closed at this point; the callers below reopen the file themselves
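# A minimal usage sketch (file name illustrative; assumes a KDP .DAT file whose comment lines start with "!"):
# r, hits = search_string_in_file("lens.DAT", "CV", not_string="!")
# for line_number, text in hits:
#     print(line_number, text)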
def search_string(file_name,read_obj, string_to_search, not_string = '!', not_string2 = '!'):
line_number = 0
list_of_results = []
with open(file_name, 'r') as read_obj:
# Read all lines in the file one by one
for line in read_obj:
# For each line, check if line contains the string
line_number += 1
if string_to_search in line and (not_string not in line) and (not_string2 not in line):
# If yes, then add the line number & line as a tuple in the list
list_of_results.append((line_number, line.rstrip()))
return np.array(list_of_results)
def return_optical_data(file_name,r, L):
"""
COATING , 0.00000000000000
CV , 0.00000000000000
TH , 0.100000000000000E+21
RAYERROR 0.00000000000000
AIR
"""
# Open the file in read only mode
#with open(file_name, 'r') as read_obj:
# Read all lines in the file one by one
# for line in read_obj:
coating_L = []
cv_L = []
th_L = []
rayerror_L = []
mat_L = []
#print("L = ", L[:,0])
L_EOS = search_string(file_name, r, "EOS")
line_EOS = float(L_EOS[0][0])  # np.float was removed in NumPy 1.24; use the builtin float
for i in range(0, len(L)):
coating = float(np.loadtxt(file_name, skiprows = int(L[i,0]), max_rows = 1,dtype='str')[2])
coating_L.append(coating)
cv = float(np.loadtxt(file_name,skiprows=int(L[i, 0])+1, max_rows=1,dtype='str')[2])
cv_L.append(cv)
#th = np.float(np.loadtxt(file_name,skiprows=int(L[i, 0])+2, max_rows=1,dtype='str')[2])
#th_L.append(th)
#rayerror = np.loadtxt(file_name,skiprows=int(L[i, 0])+3, max_rows=1,dtype='str')[1]
#rayerror_L.append(rayerror)
#mat = np.loadtxt(file_name,skiprows=int(L[i, 0])+4, max_rows=1,dtype='str')
#mat_L.append(mat)
#print(rayerror)
#print(mat_L)
#print(rayerror_L)
coating = np.array(coating_L)
cv = np.array(cv_L)
#th = np.array(th_L)
rayerror = np.array(rayerror_L)
#mat = np.array(mat_L)
cc_L = []
L_cc = search_string(file_name, r, "CC")
#print("L_cc: ", L_cc)
for i in range(0, len(L_cc)):
if (int(L_cc[i,0])-1)< line_EOS:
cc = np.array(np.loadtxt(file_name, skiprows = int(L_cc[i,0])-1, max_rows = 1,dtype='str'))[2]
#print("clap:", cc)
cc_L.append(cc)
cc = np.array(cc_L)
print("cc: ", cc)
cc_pos_L = []
for j in range(0,len(L_cc)):
if (int(L_cc[j, 0]) - 1) < line_EOS:
cc_pos = np.min(np.argwhere(L_cc[j, 0] <= L[:, 0]))-1
cc_pos_L.append(cc_pos)
cc_pos = np.unique(np.array(cc_pos_L))
th_L = []
L_th = search_string(file_name, r, "TH", not_string = "FOLLOWING", not_string2 = "THM")
print("L_th: ", L_th)
for i in range(0, len(L_th)):
th = np.array(np.loadtxt(file_name, skiprows = int(L_th[i,0])-1, max_rows = 1,dtype='str'))
print("th:", th)
th_L.append(th)
th = np.array(th_L)[:,2].astype(float)
def find_elem(file_name, r, keyword):
clap_L = []
L_clap = search_string(file_name, r, keyword) # clear aperture
#print("L_clap: ", L_clap, len(L_clap))
for i in range(0, len(L_clap)):
clap = np.array(np.loadtxt(file_name, skiprows = int(L_clap[i,0])-1, max_rows = 1,dtype='str'))
#print("clap:", clap)
clap_L.append(clap)
print("clap_L: ", clap_L)
clap = clap_L#np.array(clap_L)
#print(L_clap[:,0])
clap_pos_L = []
for j in range(0, len(L_clap)):
if L_clap[j,0] <= L[-1,0]:
clap_pos = np.min(np.argwhere(L_clap[j,0] <= L[:,0]))-1
else:
clap_pos = len(L)-1
clap_pos_L.append(clap_pos)
return clap, clap_pos_L
clap, clap_pos_L = find_elem(file_name, r, keyword = "CLAP")
thm_L = []
thm_pos_L = []
L_thm = search_string(file_name, r, "THM") # clear aperture
#thm_pos = np.argwhere(L_thm[:, 0] <= L[:, 0])
#print("L_thm: ", np.array(L_thm[:,0]))
for j in range(0,len(L_thm)):
thm_pos = np.min(np.argwhere(L_thm[j, 0] <= L[:, 0]))-1
thm_pos_L.append(thm_pos)
#print("thm_pos: ", thm_pos_L)
for i in range(0, len(L_thm)):
thm = np.array(np.loadtxt(file_name, skiprows = int(L_thm[i,0])-1, max_rows = 1,dtype='str'))
#print("clap:", clap)
thm_L.append(thm)
thm_pos_L.append(thm_pos)
thm = np.array(thm_L)
#print(L_thm[:,0][0])
thm_pos = np.unique(np.array(thm_pos_L))
L_astop = search_string(file_name, r, "ASTOP") # aperture stop
#print("L_astop: ", L_astop)
astop_pos = np.min(np.argwhere(L_astop[:, 0] <= L[:, 0])) - 1
L_tilt = search_string(file_name, r, "TILT") # tilt
#print("L_tilt: ", L_tilt)
#print(L_tilt[0][0])
#print(L_tilt[0][1])
tilt_pos_L = []
tilt_L = []
tilttype_L = []
for j in range(0, len(L_tilt)):
tilt_pos = np.min(np.argwhere(L_tilt[j, 0] < L[:, 0])) - 1
tilt_pos_L.append(tilt_pos)
tilt = np.loadtxt(file_name, skiprows=int(L_tilt[j][0])-1, max_rows = 1, dtype='str', delimiter=',')
tilttype = tilt[0][4:]
tilt = tilt[1:].astype(float)
tilt_L.append(tilt)
tilttype_L.append(tilttype)
tilt_pos = np.unique(np.array(tilt_pos_L))
tilt_L = np.array(tilt_L)
tilttype_L = np.array(tilttype_L)
#print("tilt: ", tilt)
refl, refl_pos_L = find_elem(file_name, r, keyword="REFL")
refl_pos = np.unique(np.array(refl_pos_L))
keywords = "AIR" or "OHARA"
mat, mat_pos_L = find_elem(file_name, r, keyword=keywords)
L_refs = search_string(file_name, r, "REFS") # reflector
#print("L_refs: ", L_refs)
refs_pos = np.min(np.argwhere(L_refs[:, 0] <= L[:, 0])) - 1
L_aimray = search_string(file_name, r, "AIMRAY") # aimray
#print("L_aimray: ", L_aimray[0][1], L_aimray[1][1:])
L_aimray = [L_aimray[0][1],L_aimray[1][1:]]
L_mode = search_string(file_name, r, "MODE") # mode
#print("L_mode: ", L_mode)
mode = L_mode[0][1]
sptwt = np.loadtxt(file_name, skiprows=int(L_mode[0][0]), max_rows = 1, dtype='str', delimiter=',')[1:].astype(float)
sptwt2 = np.loadtxt(file_name, skiprows=int(L_mode[0][0])+1, max_rows = 1, dtype='str', delimiter=',')[1:].astype(float)
L_FLDSRAYS = search_string(file_name, r, "FLDSRAYS") # FLDSRAYS
#print("L_FLDSRAYS: ", L_FLDSRAYS[0][0])
fldrays = float(L_FLDSRAYS[0][1][12:])
# read all field rays
frays = np.loadtxt(file_name, skiprows=int(L_FLDSRAYS[0][0]), max_rows=200, dtype='str')
frays = np.char.replace(frays, 'D', 'E').astype(np.float64)
#print(frays.shape)
frays2 = np.loadtxt(file_name, skiprows=int(L_FLDSRAYS[0][0])+200, max_rows=500, dtype='str')
frays2 = np.char.replace(frays2, 'D', 'E').astype(np.float64)
#print(frays2.shape)
# FLDS MAX
L_fldsmax = search_string(file_name, r, "FLDS MAX") # mode
flds = np.loadtxt(file_name, skiprows=int(L_fldsmax[0][0]), dtype='str', delimiter = ',')[:,1:].astype(np.float64)
#print(flds.shape)
#flds = flds[:,].astype(np.float)
return coating, cv, (cc_pos,cc),th, (clap_pos_L,clap), (thm_pos,thm), astop_pos, (tilt_pos, tilt_L, tilttype_L), refl_pos, (mat_pos_L, mat), refs_pos, L_aimray, mode, fldrays, frays, frays2, flds
if __name__ == "__main__":
import os
cwd = os.getcwd()
filename = os.path.join(cwd,os.path.join("KDP-2_examples","Cassegrain.DAT"))
#filename = "Newtonian.DAT"
r, L = search_string_in_file(filename, "#", "PIKUP")
coating, cv, cc, th, clap, thm, astop, tilt, refl, mat, refs, aimray, mode, fldrays, frays, frays2,flds = return_optical_data(filename,r, L)
# =============================================================0
# transfer to pyrate
n_surfaces = len(L)
if 0:
shape_L = []
builduplist = []
for i in range(0, n_surfaces):
shape = {"shape": "Conic", "curv": cv[i]}
shape_L.append(shape)
elem_L = []
elem_L.append(shape)
elem_L.append({"decz": th[i]})
elem_L.append(None)
if i == 0:
elem_L.append("source")
elif i == n_surfaces-1:
elem_L.append("image")
else:
elem_L.append(str(i))
if i == astop:
elem_L.append({"is_stop": True})
else:
elem_L.append({})
for j in range(0, len(refl)):
if i == refl[j]:
elem_L.append( {"is_mirror": True})
builduplist.append(tuple(elem_L))
builduplist = np.array(builduplist)
# definition of optical system
from pyrateoptics.raytracer.ray import RayBundle
from pyrateoptics.raytracer.analysis.optical_system_analysis import\
OpticalSystemAnalysis
from pyrateoptics import build_simple_optical_system, draw
from VisualOpticsPy import draw3D
(s, sysseq) = build_simple_optical_system(builduplist)
from pyrateoptics.raytracer.material.material_isotropic import\
ConstantIndexGlass
from pyrateoptics.raytracer.surface_shape import Conic
from pyrateoptics.raytracer.optical_element import OpticalElement
from pyrateoptics.raytracer.optical_system import OpticalSystem
from pyrateoptics.raytracer.surface import Surface
from pyrateoptics.raytracer.aim import Aimy
from pyrateoptics.raytracer.aperture import CircularAperture
# no elliptic aperture class exists in pyrate (have to define it here):
from VisualOpticsPy.aperture_add import EllipticAperture
#from pyrateoptics.raytracer.aperture import EllipticAperture
from pyrateoptics.raytracer.localcoordinates import LocalCoordinates
from pyrateoptics import draw
from VisualOpticsPy import draw3D
import logging
logging.basicConfig(level=logging.INFO)
wavelength = 0.5876e-3
# definition of optical system
# v = np.ones(3)# + 0.001*np.random.random(3)
# myeps = np.diag(v)
th = np.insert(th, 0,0) # object is at origin of system
th[th > 1e10] = 0.1111 # replace KDP's quasi-infinite thickness (0.1E+21) with a finite placeholder
lc_L = []
s = OpticalSystem.p()
name_L = []
tiltflag = 0
tilt_a_L = []
CS_L = []
to_deg = 1/180*math.pi # conversion factor from degrees to radians (pi/180)
for i in range(0, n_surfaces):
tiltnext = 0
if i == 0:
name = "object"
elif i == n_surfaces-1:
name = "image"
else:
name = str(i)
if i in tilt[0]:
for j in range(0, len(tilt[0])):
if i == tilt[0][j]:
tilt_a = tilt[1][j]
if tilt[2][j] == " BEN ":
tiltnext = 1
else:
tiltnext = 0
else:
tilt_a = np.zeros(3)
name_L.append(name)
if i == 0:
CS = s.rootcoordinatesystem.name
else:
CS = lc_L[i-1].name
CS_L.append(CS)
if tiltflag == 0:
lc_L.append(s.addLocalCoordinateSystem(LocalCoordinates.p(name=name, decz=th[i], tiltx = tilt_a[0]*to_deg, tilty=tilt_a[1], tiltz = tilt_a[2], tiltThenDecenter = 0),
refname=CS))
if tiltflag == 1:
lc_L.append(s.addLocalCoordinateSystem(
LocalCoordinates.p(name=name, decz=th[i], tiltx=tilt_a_L[i-1][0]*to_deg, tilty=-tilt_a_L[i-1][1], tiltz=-tilt_a_L[i-1][2], tiltThenDecenter = 1),
refname=CS_L[i]))
if tiltnext:
tiltflag = 1
#tilt_a = -tilt_a_L[i-1]
else:
tiltflag = 0
tilt_a_L.append(tilt_a)
tilt_a_L = np.array(tilt_a_L)
CS_L = np.array(CS_L)
# note: s.rootcoordinatesystem.name is global coordinate system
lc_L = np.array(lc_L)
# air = AnisotropicMaterial(lc0, myeps) # tests for anisotropic mirror
air = ConstantIndexGlass.p(lc_L[0], 1.0)
s.material_background = air
surf_L = []
for i in range(0, n_surfaces):
Shape = Conic.p(lc_L[i], curv=cv[i])
if i in clap[0]:
for j in range(0, len(clap[1])):
if i == clap[0][j]:
Aperturetype = clap[1][j][1]
if Aperturetype == ',':
Aperturetype = "Circular"
if Aperturetype == "Circular":
Aperture = CircularAperture.p(lc_L[clap[0][j]], maxradius=float(clap[1][j][2]), minradius = 1) # which type of aperture
elif Aperturetype == 'ELIP':
Aperture = EllipticAperture.p(lc_L[clap[0][j]], cay=float(clap[1][1][3]), cax=float(clap[1][1][5]), Yd = float(clap[1][1][7]), Xd = float(clap[1][1][9])) # note: the hardcoded clap[1][1] indices look suspicious; clap[1][j] may be intended
surf_L.append(Surface.p(lc_L[i], shape = Shape, aperture = Aperture)) # shape = , curv =, aperture = CircularAperture
else:
surf_L.append(Surface.p(lc_L[i], shape=Shape))
# Surface.p(lc6, aperture=CircularAperture.p(lc6, maxradius=20.0))
surf_L = np.array(surf_L)
global_cs = s.rootcoordinatesystem
elem = OpticalElement.p(global_cs, name="optical_element")
elem.addMaterial("air", air)
for i in range(0, len(surf_L)):
elem.addSurface(name_L[i], surf_L[i], (None, None))
s.addElement("optical_element", elem)
surf_list = []
for i in range(0, len(name_L)):
if i in refl:
if 1:
#for j in range(0, len(refl)):
if i != astop:
La = (name_L[i], {"is_mirror": True})
elif i == astop:
La = (name_L[i], {"is_stop": True}, {"is_mirror": True})
#elif i == astop and i != refl[j]:
# La = (name_L[i], {"is_stop": True})
surf_list.append(La)
else:
if i == astop:
La = (name_L[i], {"is_stop": True})
surf_list.append(La)  # bug fix: the stop surface was previously built but never appended
else:
surf_list.append((name_L[i], {}))
surf_list_array = np.array(surf_list)
#surf_list = np.unique(surf_list_array).tolist()
sysseq = [("optical_element", surf_list)]
#sysseq = [("optical_element",
# [
# ("object", {}),
# ("m1", {"is_mirror": True})
# ]
# )
# ]
# ==========================================================
#from pyrateoptics.raytracer.analysis.optical_system_analysis import OpticalSystemAnalysis
#from pyrateoptics.raytracer.ray import RayBundle
#osa = OpticalSystemAnalysis(s, sysseq, name="Analysis")
#wavelength = 0.5876e-3
#(o, k, E0) = osa.collimated_bundle(121, {"startz": -5., "radius": 11.43},
# wave=wavelength)
#initialbundle = RayBundle(x0=o, k0=k, Efield0=E0, wave=wavelength)
#r2 = s.seqtrace(initialbundle, sysseq)
Stopsize = float(clap[1][np.where(clap[0] == astop)[0][0]][2])
#a = Aimy(s, sysseq, name="Aimy", stopsize = Stopsize, num_pupil_points=5)
#a.pupil_raster = raster.MeridionalFan()
from pyrateoptics.raytracer.globalconstants import degree
from pyrateoptics.sampling2d import raster
from pyrateoptics import raytrace
#raysdict = {"radius": 5.0, "startz": -5., "starty": -20., "anglex": 1*degree,
# "raster": raster.MeridionalFan()}
#r_red = raytrace(s, sysseq, 20, raysdict, wave=wavelength, traceoptions={})[0]
if 0:
initbundle1 = a.aim(np.array([0, 0]))
initbundle2 = a.aim(np.array([0, 0.5 * degree]))
initbundle3 = a.aim(np.array([0, -0.5 * degree]))
(pp1, r1p) = s.para_seqtrace(a.pilotbundle, initbundle1, sysseq)
(pp2, r2p) = s.para_seqtrace(a.pilotbundle, initbundle2, sysseq)
(pp3, r3p) = s.para_seqtrace(a.pilotbundle, initbundle3, sysseq)
r1r = s.seqtrace(initbundle1, sysseq)
r2r = s.seqtrace(initbundle2, sysseq)
r3r = s.seqtrace(initbundle3, sysseq)
#sourcesurf = s.elements["optical_element"].surfaces["object"]
do_not_plot = [surf_L[0]]
#do_not_plot = []
# draw rays without raytracing in pyrateoptics
# read rays from ray file
# draw rays in pyvista
draw(s, do_not_draw_surfaces=do_not_plot)
#if plotter in globals():
# plotter.close()
plotter = draw3D.draw3D_pyvista(s, vertices=10, do_not_draw_surfaces=do_not_plot)
#plotter = draw3D.draw3D_pyvista(s, [(r1p, "blue"), (r2p, "green"), (r3p, "orange")])
# draw(s, r2)
#sourcesurf = s.elements["stdelem"].surfaces["source"]
#surf1 = s.elements["stdelem"].surfaces["1"]
#draw(s, r2, do_not_draw_surfaces=[sourcesurf, surf1], do_not_draw_raybundles=[initialbundle])
#draw3D.draw3D_pyvista(s, vertices=50, do_not_draw_surfaces=[sourcesurf])
# ============================================================
end = time.time()
print("Duration: ", np.round(end - start, decimals = 2), " s")
```
#### File: OpticalDesignDocu_o/VisualKDP/read_rays.py
```python
import numpy as np
filename = r"C:\D\optical design software demos (other than Zemax)\free software\KDP-2\RAYHIST.dat"
def load_multitrace(filename):
"""
Reads the ray history file produced with the KDP commands
("mrays", "rhist on", "mtrace", "rhist swrite/write")
# see function multi_raytrace_mac in create_rays.py
:param filename:
:return:
"""
# ray_info contains: number of rays, surfaces, fields
ray_info = np.loadtxt(filename, skiprows=0, max_rows = 1)
ray_info = ray_info.astype(np.float)
ray_info = np.array(ray_info)
n_rays = ray_info[0]
n_sur = ray_info[1]
n_flds = ray_info[2]
# read surface, sequential ray, rayhist data items
# columns: surface, ray, items (see p. 218,219)
rayhist = np.loadtxt(filename, skiprows=1, dtype='str')
rayhist = np.char.replace(rayhist, 'D', 'E').astype(np.float64)
if rayhist.shape[1] == 15:
print("short ray history file (swrite)")
L_legend = [
"Surface #",
"Sequential Ray #",
"Local X - coordinate",
"Local Y - coordinate",
"Local Z - coordinate",
"Angle of Incidence",
"Ray Energy Term",
"X - component of the Angle of Incidence",
"Y - component of the Angle of Incidence",
"X - coordinate of cheif ray at object surface",
"Y - coordinate of cheif ray at object surface",
"XZ - slope angle, in radians, of the chief ray at the object surface",
"YZ - slope",
"Sequential number of the chief ray(from 1 to the maximum number of chief rays)",
"RAYCOD(1)(Ray failure code, 0 = no fail)",
"RAYCOD(2)(Surface where ray stopped)"]
L_legend = np.array(L_legend)
else:
L_legend = np.array([])
return ray_info, rayhist, L_legend
ray_info, rayhist, L_legend = load_multitrace(filename)
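# Hedged usage sketch (column indices follow the L_legend layout above; surface number illustrative):
# rays_at_surface = rayhist[rayhist[:, 0] == 3]        # rows recorded at surface 3
# x, y = rays_at_surface[:, 2], rays_at_surface[:, 3]  # local X/Y coordinates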
``` |
{
"source": "jonasalexander/configUI",
"score": 3
} |
#### File: jonasalexander/configUI/Encrypt.py
```python
import hashlib
def checkPassword(input):
print "Checking password..."
return saltAndHash(input) == user_password  # user_password already holds the salted hash; hashing it again (as before) could never match
def saltAndHash(password):
print "Hashing and salting \"%s\"..." %password
salt = "<PASSWORD>/"
sha1 = hashlib.sha1()
sha1.update(password + salt)
return sha1.hexdigest()
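# Hedged usage sketch (placeholder strings; mirrors the setup below):
#   user_password = saltAndHash("<PASSWORD>")  # store the salted hash once, at setup
#   checkPassword("guess")                      # later, compare salted hashes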
# user_password = saltAndHash("<PASSWORD>")
# MARK: Development
user_password = saltAndHash("<PASSWORD>")
```
#### File: jonasalexander/configUI/Test.py
```python
from Tkinter import *
import Graphics
import TaskClass
root = Tk()
root.withdraw()
progress = 0
def cleanUp():
#Create pdf.
root.destroy()
window = Toplevel(root)
window.wm_title("BigRep Printer Configuration")
window.protocol('WM_DELETE_WINDOW', cleanUp)
photo = PhotoImage(file="Logo.pgm").subsample(8)
def runWin():
Label(window, image=photo).grid(row=0, column=1)
Label(window, text="BigRep Configuration UI", font=Graphics.getBigRepFont(window, 40)).grid(row=0, column = 2)
runWin()
root.mainloop()
'''
from Tkinter import *
import Graphics
import TaskClass
root = Tk()
root.withdraw()
progress = 0
def cleanUp():
#Create pdf.
root.destroy()
def runWin():
window = Toplevel(root)
window.wm_title("BigRep Printer Configuration")
window.protocol('WM_DELETE_WINDOW', cleanUp)
photo = PhotoImage(file="Logo.pgm").subsample(8)
Label(window, image=photo).grid(row=0, column=1)
Label(window, text="BigRep Configuration UI", font=Graphics.getBigRepFont(window, 40)).grid(row=0, column = 2)
tasks = TaskClass.tasks[progress:progress+TaskClass.groupSize]
contentFields = []
for r in range(len(tasks)):
row = r+1
label = Label(window, text=tasks[r].label).grid(row=row,column=1, sticky=W)
content = Entry(window, width=40)
content.grid(row=row, column=2, sticky=W)
content.insert(0, tasks[r].content)
contentFields += [content]
#MainWin.rowconfigure(r, minsize=MainWin.winfo_height/len(tasks))
#MainWin.columnconfigure(1, minsize=MainWin.winfo_width/2)
def onButtonClick():
print 'Button clicked!'
for num in range(0, TaskClass.groupSize-1):
TaskClass.tasks[progress + num] = contentFields[num].get()
window.destroy()
runWin(progress+2)
button = Button(window, text="Next", command=onButtonClick)
button.grid(row=progress+TaskClass.groupSize+1, column=2, sticky = W)
runWin()
root.mainloop()
'''
``` |
{
"source": "jonas/alloy-ui",
"score": 2
} |
#### File: yui-yuidoc/bin/yuidoc_generate.py
```python
''' Prints documentation with htmltmpl from the json data outputted by parser.py '''
import os, re, simplejson, shutil, logging, logging.config, time, datetime
from const import *
# from cStringIO import StringIO
from Cheetah.Template import Template
from sets import Set
import codecs
try:
logging.config.fileConfig(os.path.join(sys.path[0], LOGCONFIG))
except:
pass
log = logging.getLogger('yuidoc.generate')
class DocGenerator(object):
def __init__(self, inpath, datafile, outpath, templatepath, newext, showprivate=False,
projectname='Yahoo! UI Library',
version='',
projecturl='http://developer.yahoo.com/yui/',
ydn=False, copyrighttag='Yahoo! Inc.'):
def _mkdir(newdir):
if os.path.isdir(newdir): pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
head, tail = os.path.split(newdir)
if head and not os.path.isdir(head): _mkdir(head)
if tail: os.mkdir(newdir)
self.moduleprefix = MODULE_PREFIX
self.inpath = os.path.abspath(inpath)
# set and output path, create if needed
self.outpath = os.path.abspath(outpath)
self.newext = newext
_mkdir(self.outpath)
self.templatepath = os.path.abspath(templatepath)
# copy all of the directories from the template directory to the
# destination directory.
for i in os.listdir(self.templatepath):
fullname = os.path.join(self.templatepath, i)
if os.path.isdir(fullname):
targetdir = os.path.join(self.outpath, i)
try:
shutil.rmtree(targetdir)
except: pass
# requires 2.6
# shutil.copytree(fullname, targetdir, ignore=shutil.ignore_patterns(IGNORE_PATTERNS))
shutil.copytree(fullname, targetdir)
self.showprivate = showprivate
f = codecs.open(os.path.join(inpath, datafile), "r", "utf-8" )
self.rawdata = f.read()
# log.info('INPUT DATA: ' + self.rawdata)
d = self.data = simplejson.loads(self.rawdata)
self.projectname = projectname
self.projecturl = projecturl
self.copyrighttag = copyrighttag
self.ydn = ydn
self.version = version
self.modulename = ""
self.timestamp = "" # if supplied, linked script and css will have a timestamp appended to the url for cache busting
self.moduletitle = ""
self.moduledesc = "Please supply a module block somewhere in your code"
# self.requires = None
self.modules = d[MODULES]
self.modulenames = self.modules.keys()
self.modulenames.sort(lambda x,y: cmp(x.lower(), y.lower()))
self.cleansedmodulenames = {}
for mod in self.modulenames:
self.cleansedmodulenames[mod] = self.cleanseStr(mod)
self.cleansedmodulename = self.cleanseStr(self.modulename)
self.classname = ""
self.filename = ""
self.pagetype = ""
self.classmap = d[CLASS_MAP]
self.classnames = ""
self.filenames = ""
self.allprops = []
self.allprops_ext = []
def cleanseStr(self, strg):
cleanregex= re.compile(r"[^\w\-]")
cleansed = cleanregex.sub('', strg.lower())
# log.warn('cleansed module: %s' %(cleansed));
return self.moduleprefix + cleansed
def write(self, filename, data, template=True):
out = codecs.open( os.path.join(self.outpath, filename), "w", "utf-8" )
if template:
datastr = data.respond()
out.write(datastr)
else:
out.write(data)
out.close()
def process(self):
def assignGlobalProperties(template):
template.projectname = self.projectname
template.projecturl = self.projecturl
template.copyrighttag = self.copyrighttag
template.ydn = self.ydn
template.version = self.version
template.modules = self.modules
template.modulenames = self.modulenames
template.cleansedmodulenames = self.cleansedmodulenames
template.modulename = self.modulename
template.moduletitle = self.moduletitle
template.cleansedmodulename = self.cleansedmodulename
template.moduledesc = self.moduledesc
template.year = datetime.date.today().strftime('%Y')
template.filename = self.filename
if self.filename:
template.filepath = os.path.join(self.inpath, self.filename)
template.highlightcontent = codecs.open(os.path.join(self.inpath, self.filename + self.newext), "r", "utf-8" ).read()
template.pagetype = self.pagetype
template.classmap = self.classmap
template.classnames = self.classnames
template.filenames = self.filenames
template.classname = self.classname
template.requires = ""
template.optional = ""
template.properties = ""
template.methods = ""
template.events = ""
template.configs = ""
template.extends = ""
template.uses = ""
template.index = False # is this the index page
def transferToTemplate(prop, dict, template, valOverride=''):
val = ""
if prop in dict:
val = dict[prop]
if valOverride:
val = valOverride
setattr(template, prop, val)
def transferToDict(prop, dict1, dict2, default="", skipOverrideIfNoMatch=False):
val = ""
if prop in dict1:
val = dict1[prop]
if not val:
val = default
else:
if skipOverrideIfNoMatch:
pass
else:
val = default
dict2[prop] = val
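# Illustrative behavior (arguments assumed from the calls below): transferToDict(TYPE, src, dst, OBJECT)
# copies src[TYPE] into dst[TYPE], falling back to the default when the key is missing or empty.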
def shouldShow(item):
if STATIC not in item and \
(self.showprivate or PRIVATE not in item):
return True
else:
return False
def shouldShowClass(item):
if self.showprivate or PRIVATE not in item:
return True
else:
return False
def soft_sort(x, y):
return cmp(x.lower(), y.lower())
def getPropsFromSuperclass(superc, classes, dict):
# get inherited data
if shouldShowClass(superc):
supercname = superc[NAME]
if PROPERTIES in superc:
inhdef = dict[PROPERTIES][supercname] = []
keys = superc[PROPERTIES].keys()
keys.sort(soft_sort)
for prop in keys:
superprop = superc[PROPERTIES][prop]
if shouldShow(superprop):
if PRIVATE in superprop: access = PRIVATE
elif PROTECTED in superprop: access = PROTECTED
else:access = ""
inhdef.append({NAME: prop, ACCESS: access, DEPRECATED: DEPRECATED in superprop})
if METHODS in superc:
inhdef = dict[METHODS][supercname] = []
keys = superc[METHODS].keys()
keys.sort(soft_sort)
for method in keys:
supermethod = superc[METHODS][method]
if shouldShow(supermethod):
if PRIVATE in supermethod: access = PRIVATE
elif PROTECTED in supermethod: access = PROTECTED
else:access = ""
inhdef.append({NAME: method, ACCESS: access, DEPRECATED: DEPRECATED in supermethod})
if EVENTS in superc:
inhdef = dict[EVENTS][supercname] = []
keys = superc[EVENTS].keys()
keys.sort(soft_sort)
for event in keys:
superevent = superc[EVENTS][event]
if shouldShow(superevent):
# inhdef.append(event)
if PRIVATE in superevent: access = PRIVATE
elif PROTECTED in superevent: access = PROTECTED
else:access = ""
inhdef.append({NAME: event, ACCESS: access, DEPRECATED: DEPRECATED in superevent})
if CONFIGS in superc:
inhdef = dict[CONFIGS][supercname] = []
keys = superc[CONFIGS].keys()
keys.sort(soft_sort)
for config in keys:
superconfig = superc[CONFIGS][config]
if shouldShow(superconfig):
#inhdef.append(config)
if PRIVATE in superconfig: access = PRIVATE
elif PROTECTED in superconfig: access = PROTECTED
else:access = ""
inhdef.append({NAME: config, ACCESS: access, DEPRECATED: DEPRECATED in superconfig})
if EXTENDS in superc:
supercname = superc[EXTENDS]
if supercname in classes:
getPropsFromSuperclass(classes[supercname], classes, dict)
if USES in superc:
for supercname in superc[USES]:
if supercname in classes:
getPropsFromSuperclass(classes[supercname], classes, dict)
# build url: class, property, type
def getUrl(c, p, t=''):
return "%s.html#%s_%s" %(c, t, p)
#sort is case insensitive and ignores punctuation for the search json file
def allprop_sort(x, y):
pat = re.compile(r"[\_\-\.]")
cx = x[NAME].lower()
cy = y[NAME].lower()
cx = pat.sub('', cx)
cy = pat.sub('', cy)
return cmp(cx, cy)
def completeProp(main, ext):
data = main.copy()
if DESCRIPTION in ext:
data[DESCRIPTION] = ext[DESCRIPTION]
else:
data[DESCRIPTION] = ''
if PARAMS in ext:
params = ext[PARAMS]
count = 0
result = []
itemtemplate = '%s <%s> %s'
for p in params:
if count > 0:
result.append(', ')
result.append(itemtemplate % (p[NAME] or 'unknown', p[TYPE] or 'Object', p[DESCRIPTION] or ''))
count+=1
data[PARAMS] = ''.join(result)
else:
data[PARAMS] = ''
return data
log.info("-------------------------------------------------------")
# copy the json file
# jsonname = self.cleansedmodulename + ".json"
jsonname = "raw.json"
log.info("Writing " + jsonname)
self.write(jsonname, self.rawdata, False)
for mname in self.modules:
log.info("Generating module splash for %s" %(mname))
m = self.modules[mname]
self.filename = ""
self.classname = ""
classes = self.data[CLASS_MAP]
self.classnames = []
for i in m[CLASS_LIST]:
if shouldShowClass(classes[i]):
self.classnames.append(i)
self.classnames.sort(soft_sort)
t = Template(file=os.path.join(self.templatepath, "main.tmpl"))
# @TODO add command line option for timestamp
# timestamp = time.time()
timestamp = ""
t.timestamp = timestamp
transferToTemplate(REQUIRES, m, t)
self.modulename = mname
self.moduletitle = mname
if TITLE in m:
self.moduletitle = m[TITLE]
self.cleansedmodulename = self.cleanseStr(mname)
if DESCRIPTION in m:
self.moduledesc = m[DESCRIPTION]
else:
log.warn("Missing module description for " + mname)
self.moduledesc = ''
self.filenames = m[FILE_LIST]
self.filenames.sort(soft_sort)
assignGlobalProperties(t)
transferToTemplate(REQUIRES, m, t)
transferToTemplate(OPTIONAL, m, t)
transferToTemplate(BETA, m, t, "Beta")
transferToTemplate(EXPERIMENTAL, m, t, "Experimental")
if len(m[SUBMODULES]) > 0:
strg = ', '.join(m[SUBMODULES])
else:
strg = 'none'
transferToTemplate(SUBMODULES, m, t, strg)
t.submodules = m[SUBMODULES]
transferToTemplate(SUBDATA, m, t, '')
t.subdata = m[SUBDATA]
moduleprops = []
classList = []
# class API view
#for i in classes:
for i in m[CLASS_LIST]:
self.classname = i
c = classes[i]
if shouldShowClass(c):
log.info("Generating API page for " + i)
assignGlobalProperties(t)
# template items that need default values even if not included
transferToTemplate( SEE, c, t )
transferToTemplate( DEPRECATED, c, t )
transferToTemplate( DESCRIPTION, c, t )
transferToTemplate( STATIC, c, t )
if STATIC in c: t.static = STATIC
transferToTemplate( FINAL, c, t )
if FINAL in c: t.final = FINAL
transferToTemplate( ACCESS, c, t )
if PRIVATE in c: t.access = PRIVATE
elif PROTECTED in c: t.access = PROTECTED
desc = ''
if DESCRIPTION in c:
desc = c[DESCRIPTION]
#subclasses
subclasses = self.subclasses = []
for j in classes:
if SUPERCLASS in classes[j] and classes[j][SUPERCLASS] == i:
subclasses.append(j)
t.subclasses = subclasses
gName = i.replace('YAHOO.widget.', '')
gName = gName.replace('YAHOO.util.', '')
classInfo = { DESCRIPTION: desc, NAME: i, GUESSEDNAME: gName, EXTENDS: [] }
# Properties/fields
props = t.properties = []
if PROPERTIES in c:
keys = c[PROPERTIES].keys()
keys.sort(soft_sort)
for propertykey in keys:
prop = c[PROPERTIES][propertykey]
if self.showprivate or PRIVATE not in prop:
propdata = {
NAME: propertykey,
HOST: i,
TYPE: 'property',
URL: getUrl(i, propertykey, PROPERTY)
}
transferToDict( ACCESS, prop, propdata )
if PRIVATE in prop: propdata[ACCESS] = PRIVATE
elif PROTECTED in prop: propdata[ACCESS] = PROTECTED
self.allprops.append(propdata.copy())
# completeProp(propdata, prop)
self.allprops_ext.append(completeProp(propdata, prop))
moduleprops.append(propdata.copy())
transferToDict( TYPE, prop, propdata, OBJECT )
transferToDict( DESCRIPTION, prop, propdata )
transferToDict( DEFAULT, prop, propdata )
transferToDict( DEPRECATED, prop, propdata, NBWS, DEPRECATED )
transferToDict( SEE, prop, propdata )
transferToDict( STATIC, prop, propdata )
if STATIC in prop: propdata[STATIC] = STATIC
transferToDict( FINAL, prop, propdata )
if FINAL in prop: propdata[FINAL] = FINAL
props.append(propdata)
# Methods
methods = t.methods = []
if METHODS in c:
keys = c[METHODS].keys()
keys.sort(soft_sort)
for methodkey in keys:
method = c[METHODS][methodkey]
if self.showprivate or PRIVATE not in method:
methoddata = {NAME: methodkey, HOST: i, TYPE: 'method', URL:getUrl(i, methodkey, METHOD)}
transferToDict( ACCESS, method, methoddata )
if PRIVATE in method: methoddata[ACCESS] = PRIVATE
elif PROTECTED in method: methoddata[ACCESS] = PROTECTED
self.allprops.append(methoddata.copy())
# completeProp(methodData, method)
self.allprops_ext.append(completeProp(methoddata, method))
moduleprops.append(methoddata.copy())
transferToDict( DESCRIPTION, method, methoddata )
transferToDict( DEPRECATED, method, methoddata, NBWS, DEPRECATED )
transferToDict( SEE, method, methoddata )
transferToDict( STATIC, method, methoddata )
if STATIC in method: methoddata[STATIC] = STATIC
transferToDict( FINAL, method, methoddata )
if FINAL in method: methoddata[FINAL] = FINAL
transferToDict( CHAINABLE, method, methoddata )
if CHAINABLE in method: methoddata[CHAINABLE] = CHAINABLE
ret = methoddata[RETURN] = {NAME:"", DESCRIPTION:"", TYPE:VOID}
if RETURN in method:
transferToDict( TYPE, method[RETURN], ret, "" )
transferToDict( DESCRIPTION, method[RETURN], ret )
params = methoddata[PARAMS] = []
if PARAMS in method:
mp = method[PARAMS]
for p in mp:
param = {}
transferToDict( NAME, p, param, UNKNOWN )
transferToDict( TYPE, p, param, OBJECT )
transferToDict( DESCRIPTION, p, param )
params.append(param)
methods.append(methoddata)
# Events
events = t.events = []
if EVENTS in c:
keys = c[EVENTS].keys()
keys.sort(soft_sort)
for eventkey in keys:
event = c[EVENTS][eventkey]
if self.showprivate or PRIVATE not in event:
eventdata = {
NAME: eventkey,
HOST: i,
TYPE: 'event',
URL: getUrl(i, eventkey, EVENT)
}
transferToDict( ACCESS, event, eventdata )
if PRIVATE in event: eventdata[ACCESS] = PRIVATE
elif PROTECTED in event: eventdata[ACCESS] = PROTECTED
self.allprops.append(eventdata.copy())
# completeProp(eventdata, event)
self.allprops_ext.append(completeProp(eventdata, event))
moduleprops.append(eventdata.copy())
transferToDict( DESCRIPTION, event, eventdata )
transferToDict( DEPRECATED, event, eventdata, NBWS, DEPRECATED )
transferToDict( SEE, event, eventdata )
transferToDict( STATIC, event, eventdata )
if STATIC in event: eventdata[STATIC] = STATIC
transferToDict( FINAL, event, eventdata )
if FINAL in event: eventdata[FINAL] = FINAL
transferToDict( BUBBLES, event, eventdata )
#Bubbles should contain a classname to bubble to
#if BUBBLES in event: eventdata[BUBBLES] = BUBBLES
transferToDict( PREVENTABLE, event, eventdata )
#preventable should contain a default method
#Bug #20
#if PREVENTABLE in event: eventdata[PREVENTABLE] = PREVENTABLE
transferToDict( CANCELABLE, event, eventdata )
if CANCELABLE in event: eventdata[CANCELABLE] = CANCELABLE
params = eventdata[PARAMS] = []
if PARAMS in event:
mp = event[PARAMS]
for p in mp:
param = {}
transferToDict( NAME, p, param, UNKNOWN )
transferToDict( TYPE, p, param, OBJECT )
transferToDict( DESCRIPTION, p, param )
params.append(param)
events.append(eventdata)
# configs
configs = t.configs = []
if CONFIGS in c:
keys = c[CONFIGS].keys()
keys.sort(soft_sort)
for configkey in keys:
config = c[CONFIGS][configkey]
if self.showprivate or PRIVATE not in config:
configdata = {NAME: configkey, HOST: i, TYPE: 'config', URL:getUrl(i, configkey, CONFIG)}
transferToDict( ACCESS, config, configdata )
if PRIVATE in config: configdata[ACCESS] = PRIVATE
elif PROTECTED in config: configdata[ACCESS] = PROTECTED
self.allprops.append(configdata.copy())
# completeProp(configdata, config)
self.allprops_ext.append(completeProp(configdata, config))
moduleprops.append(configdata.copy())
transferToDict( TYPE, config, configdata, OBJECT )
transferToDict( DESCRIPTION, config, configdata )
transferToDict( DEFAULT, config, configdata )
transferToDict( DEPRECATED, config, configdata, NBWS, DEPRECATED )
transferToDict( SEE, config, configdata )
transferToDict( STATIC, config, configdata )
if STATIC in config: configdata[STATIC] = STATIC
transferToDict( FINAL, config, configdata )
if FINAL in config: configdata[FINAL] = READONLY
transferToDict( WRITEONCE, config, configdata )
if WRITEONCE in config: configdata[WRITEONCE] = WRITEONCE
configs.append(configdata)
# get inherited data
inherited = t.inherited = {PROPERTIES:{}, METHODS:{}, EVENTS:{}, CONFIGS:{}, SUPERCLASS: {} }
if EXTENDS in c:
supercname = t.extends = c[EXTENDS]
if supercname in classes:
superc = classes[supercname]
getPropsFromSuperclass(superc, classes, inherited)
if USES in c:
for supercname in c[USES]:
t.uses = c[USES]
if supercname in classes:
superc = classes[supercname]
getPropsFromSuperclass(superc, classes, inherited)
#Create the superclass chain and attach it to the classInfo Object
extends = {}
for i in inherited:
for a in inherited[i]:
extends[a] = a
inherited[SUPERCLASS] = extends
classInfo[EXTENDS] = inherited
classList.append(classInfo)
# Constructor -- technically the parser can take multiple constructors
# but that doesn't help here
constructordata = t.constructor = {}
if CONSTRUCTORS in c:
constructor = c[CONSTRUCTORS][0]
transferToDict( DESCRIPTION, constructor, constructordata )
ret = constructordata[RETURN] = {}
if RETURN in constructor:
transferToDict( TYPE, constructor[RETURN], ret, VOID )
transferToDict( DESCRIPTION, constructor[RETURN], ret )
params = constructordata[PARAMS] = []
if PARAMS in constructor:
cp = constructor[PARAMS]
for p in cp:
param = {}
transferToDict( NAME, p, param, UNKNOWN )
transferToDict( TYPE, p, param, OBJECT )
transferToDict( DESCRIPTION, p, param )
params.append(param)
# write module splash
moduleprops.sort(allprop_sort)
t.allprops_raw = moduleprops
moduleprops_json = simplejson.dumps(moduleprops, ensure_ascii=False)
t.allprops = moduleprops_json
classList.sort(allprop_sort)
t.classList_raw = classList
t.classList = simplejson.dumps(classList, ensure_ascii=False)
self.write("%s.html" %(self.classname), t)
# clear out class name
self.classname = ""
t.classname = ""
t.filename = ""
t.properties = ""
t.methods = ""
t.events = ""
t.configs = ""
# write module splash
moduleprops.sort(allprop_sort)
t.allprops_raw = moduleprops
moduleprops_json = simplejson.dumps(moduleprops, ensure_ascii=False)
t.allprops = moduleprops_json
# log.warn('cleansed module file name: %s' %(t.cleansedmodulename));
self.write( t.cleansedmodulename + ".html", t)
# class source view
for i in m[FILE_LIST]:
log.info("Generating source view for " + i)
self.filename = i
assignGlobalProperties(t)
self.write("%s.html" %(self.filename), t)
#remove dups
allprops = []
propmap = {}
# for i in self.allprops:
for i in self.allprops_ext:
url = i[URL]
if url not in propmap:
allprops.append(i)
propmap[url] = True
allprops.sort(allprop_sort)
# self.allprops_ext.sort(allprop_sort)
allprops_json = simplejson.dumps(allprops, ensure_ascii=False)
self.write("index.json", allprops_json, False)
# index
log.info("Generating index")
t = Template(file=os.path.join(self.templatepath, "main.tmpl"))
# @TODO add command line option for timestamp
# timestamp = time.time()
timestamp = ""
t.timestamp = timestamp
self.modulename = ""
self.moduletitle = ""
self.classname = ""
self.classnames = []
for i in self.data[CLASS_MAP].keys():
if shouldShowClass(self.data[CLASS_MAP][i]):
self.classnames.append(i)
self.classnames.sort(soft_sort)
self.filenames = self.data[FILE_MAP].keys()
self.filenames.sort(soft_sort)
self.filename = ""
assignGlobalProperties(t)
t.allprops = allprops_json
t.index = True
self.write("index.html", t)
# map all classes to the corresponding module for external loaders
t = Template(file=os.path.join(self.templatepath, "classmap.tmpl"))
# @TODO add command line option for timestamp
# timestamp = time.time()
timestamp = ""
t.timestamp = timestamp
pkgMap = {}
keys = self.data[CLASS_MAP].keys()
keys.sort()
for i in keys:
try:
pkgMap[i] = self.data[CLASS_MAP][i][MODULE]
except:
try:
log.warn('class map ' + i + ' failure (no module declaration?)')
except: pass
t.pkgmap = simplejson.dumps(pkgMap, ensure_ascii=False)
self.write("classmap.js", t)
log.info(" ")
log.info("Done\n")
def main():
from optparse import OptionParser
optparser = OptionParser("usage: %prog inputdir [options] inputdir")
optparser.set_defaults(outputdir="docs",
inputfile="parsed.json",
newext=".highlighted",
showprivate=False,
project="Yahoo! UI Library",
version=""
)
optparser.add_option( "-o", "--outputdir",
action="store", dest="outputdir", type="string",
help="Directory to write the html documentation" )
optparser.add_option( "-f", "--file",
action="store", dest="inputfile", type="string",
help="The name of the file that contains the JSON doc info" )
optparser.add_option( "-t", "--template",
action="store", dest="templatedir", type="string",
help="The directory containing the html tmplate" )
optparser.add_option( "-c", "--crosslink",
action="store", dest="crosslinkdir", type="string",
help="The directory containing json data for other modules to crosslink" )
optparser.add_option( "-C", "--copyright",
action="store", dest="copyrighttag", type="string",
help="The name to use in the copyright line at the bottom of the pages." )
optparser.add_option( "-s", "--showprivate",
action="store_true", dest="showprivate",
help="Should private properties/methods be in the docs?" )
optparser.add_option( "-n", "--newextension",
action="store", dest="newext", type="string",
help="The extension to append to the syntax output file" )
optparser.add_option( "-m", "--project",
action="store", dest="project", type="string",
help="The name of the project" )
optparser.add_option( "-v", "--version",
action="store", dest="version", type="string",
help="The version of the project" )
optparser.add_option( "-u", "--projecturl",
action="store", dest="projecturl", type="string",
help="The project url" )
optparser.add_option( "-y", "--ydn",
action="store_true", dest="ydn",
help="Add YDN MyBlogLog intrumentation?" )
(options, inputdirs) = optparser.parse_args()
if len(inputdirs) > 0:
generator = DocGenerator( inputdirs[0],
options.inputfile,
options.outputdir,
options.templatedir,
options.newext,  # was missing, which shifted every later positional argument by one
options.showprivate,
options.project,
options.version,
options.projecturl,
options.ydn,
options.copyrighttag
)
generator.process()
else:
optparser.error("Incorrect number of arguments")
if __name__ == '__main__':
main()
``` |
{
"source": "jonasangstrom/uxps",
"score": 3
} |
#### File: uxps/examples/plot_multiplex.py
```python
import matplotlib.pyplot as plt
from uxps.io_functions import read_multiplex
from uxps.plotting import plot_detail
from uxps.model import Model, get_data_in_range
path = 'multiplex.txt'
mplx_dict = read_multiplex(path)
model_dict = {}
# another signing test
def plot_scaled_multiplex(mplx_dict, model_dict, offset_name=''):
# TODO plot refinements if avaliable and also plot survey
for name in mplx_dict:
detail = mplx_dict[name]
t = detail['t/step'] * detail['sweeps']
plot_detail(detail, name, scale=t,)
plt.legend()
plt.title('scaled multiplex+survey')
plt.xlim(1000, 0)
plot_scaled_multiplex(mplx_dict, model_dict)
plt.show()
print(mplx_dict)
``` |
{
"source": "JonasAraujoP/Python",
"score": 4
} |
#### File: Python/desafios/desafio103.py
```python
def ficha(nome='', gol=''):
print(f"Nome: {nome}\nGols Marcado: {gol}")
print('\033[33m-=-\033[m' * 15)
print('\033[33m-=-\033[m'*15)
nome = str(input('Nome: ')).title()
if nome == '':
nome = '\033[31mDesconhecido\033[m'
gol = str(input(f'Quantos gols o jogador \033[34m{nome}\033[m marcou? '))
if gol.isnumeric():
gol = int(gol)
else:
gol = '\033[31m"0 gol(s)"\033[m'
print('\033[33m-=-\033[m'*15)
ficha(nome, gol)
```
#### File: Python/desafios/desafio113.py
```python
def leiaint(x):
while True:
try:
n = input(x)
return int(n)
except KeyboardInterrupt:
print('\033[31mEntrada de dados interrompida pelo usuário.\033[m')
return 0
except ValueError:
print(f'\033[31mErro. O número "{n}" é inválido.\033[m')
def leiafloat(x):
while True:
try:
n = input(x)
return float(n)
except KeyboardInterrupt:
print('\033[31mEntrada de dados interrompida pelo usuário.\033[m')
return 0
except ValueError:
print(f'\033[31mErro. O número "{n}" é inválido.\033[m')
inteiro = leiaint('Digite um número inteiro: ')
m = leiafloat('Digite um número real: ')
print()
print(f'O valor inteiro digitado foi {inteiro} e o real foi {m}')
``` |
{
"source": "jona-sassenhagen/python_for_psychologists",
"score": 3
} |
#### File: hog2/experiment/Unser experiment.py
```python
from __future__ import unicode_literals, print_function
from psychopy import visual, event, core, monitors
from random import choice
m = monitors.Monitor("default", width=28.8, distance=200)
m.setSizePix([800, 600])
win = visual.Window(
allowGUI=False, monitor=m,
bitsMode=None,
winType='pyglet', rgb=1,
fullscr=False,
screen=1, units="pix"
)
clock = core.Clock()
logfile = "log" + str(clock.getTime()) + ".csv"
with open(logfile, "w") as f:
print("block,trial,congruence,label,correct,rt", file=f)
n_experiment_blocks = 4
def draw_and_wait(stim, time=.495):
"Draw one stimulus, flip, wait."
stim.draw()
win.flip()
core.wait(time)
def one_trial(congruence, label):
"Run one single trial, return RT and if the correct answer was given."
stim = visual.TextStim(win, text="o", color=(-1, -1, -1))
draw_and_wait(stim)
if label == 'Ich':
stim = visual.TextStim(win, text='Ich', pos=(-250,0), color=(-1, -1, -1))
stim.draw()
if congruence == True:
fname = 'snake.png'
stim = visual.ImageStim(win, fname, pos=(250,0), size=(200,200))
stim.draw()
else:
fname = 'spider.png'
stim = visual.ImageStim(win, fname, pos=(250,0), size=(200,200))
stim.draw()
else:
stim = visual.TextStim(win, text='Stuhl', pos=(-250,0), color=(-1, -1, -1))
stim.draw()
if congruence == True:
fname = 'spider.png'
stim = visual.ImageStim(win, fname, pos=(250,0), size=(200,200))
stim.draw()
else:
fname = 'snake.png'
stim = visual.ImageStim(win, fname, pos=(250,0), size=(200,200))
stim.draw()
win.flip()
clock.reset()
try:
key, time = event.waitKeys(keyList=['m', 'c', 'q'], maxWait=3, timeStamped=clock)[0]
except TypeError:
key, time = "miss", -999
if key == "q":
win.close()
core.quit()
if congruence == True:
correct = key == 'm'
else:
correct = key == 'c'
if not correct:
fname = 'error.png'
stim = visual.ImageStim(win, fname, size=(200,200))
draw_and_wait(stim)
return time, correct
def display_text(text):
"Display text and wait for a keypress."
stim = visual.TextStim(win, text= text, color=(-1, -1, -1))
stim.draw()
win.flip()
event.waitKeys()
win.flip()
core.wait(.495)
def one_block(n, n_trials=8):
labels = ("Ich", "Stuhl")
congruences = (True, False)
for trial in range(n_trials):
label = choice(labels)
congruence = choice(congruences)
time, correct = one_trial(congruence, label)
with open(logfile, "a") as f:
print(n, trial, str(congruence), label, str(correct), str(time),sep=",", file=f)
if n+1 == n_experiment_blocks:
text = ("Thank you again for your participation. Press any key to finish.")
else:
text = ("This was block" + str(n + 1) + " of " + str(n_experiment_blocks) + " blocks in total. Press any key to continue.")
display_text(text)
text = ("""
Welcome to this experiment and thank you for participation!
Please,
- Press 'm' if 'Ich' or 'Stuhl' matches the associated picture.
- Press 'c' if 'Ich' or 'Stuhl' does not match the associated picture.
Press any key to continue.
""")
display_text(text)
for block in range(n_experiment_blocks):
one_block(block)
win.close()
core.quit()
``` |
{
"source": "jona-sassenhagen/trafaret",
"score": 2
} |
#### File: trafaret/tests/test_contrib.py
```python
import datetime
import pytest
from dateutil.tz import tzutc, tzoffset
import trafaret as t
from trafaret import DataError
from trafaret.contrib.rfc_3339 import DateTime, Date
class TestDateTime:
def test_datetime(self):
check = DateTime()
assert check('2017-09-01 23:59') == datetime.datetime(2017, 9, 1, 23, 59)
assert check('Fri Sep 1 23:59:59 UTC 2017') == datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=tzutc())
assert check('Fri Sep 1 23:59:59 2017') == datetime.datetime(2017, 9, 1, 23, 59, 59)
assert check('Fri, 1 Sep 2017 23:59:59 -0300') == datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=tzoffset(None, -10800)) # noqa
assert check('2017-09-01T23:59:59.5-03:00') == datetime.datetime(2017, 9, 1, 23, 59, 59, 500000, tzinfo=tzoffset(None, -10800)) # noqa
assert check('20170901T235959.5-0300') == datetime.datetime(2017, 9, 1, 23, 59, 59, 500000, tzinfo=tzoffset(None, -10800)) # noqa
assert check('20170901T235959-0300') == datetime.datetime(2017, 9, 1, 23, 59, 59, tzinfo=tzoffset(None, -10800)) # noqa
assert check('2017-09-01T23:59:59') == datetime.datetime(2017, 9, 1, 23, 59, 59)
assert check('20170901T235959') == datetime.datetime(2017, 9, 1, 23, 59, 59)
assert check('20170901235959') == datetime.datetime(2017, 9, 1, 23, 59, 59)
assert check('2017-09-01T23:59') == datetime.datetime(2017, 9, 1, 23, 59)
assert check('20170901T2359') == datetime.datetime(2017, 9, 1, 23, 59)
assert check('2017-09-01T23') == datetime.datetime(2017, 9, 1, 23)
assert check('20170901T23') == datetime.datetime(2017, 9, 1, 23)
assert check('2017-09-01') == datetime.datetime(2017, 9, 1)
assert check('20170901') == datetime.datetime(2017, 9, 1)
assert check('09-01-2017') == datetime.datetime(2017, 9, 1)
assert check('09-01-17') == datetime.datetime(2017, 9, 1)
assert check('2017.Sep.01') == datetime.datetime(2017, 9, 1)
assert check('2017/09/01') == datetime.datetime(2017, 9, 1)
assert check('2017 09 01') == datetime.datetime(2017, 9, 1)
assert check('1st of September 2017') == datetime.datetime(2017, 9, 1)
# Note: to equality here we need to pass extra params to parse() method
assert check('01-09-2017') != datetime.datetime(2017, 9, 1)
def test_datetime_blank(self):
check = DateTime(allow_blank=True)
with pytest.raises(DataError):
check('')
def test_nullable_datetime(self):
nullable_datetime = t.Or(DateTime, t.Null)
assert nullable_datetime.check(None) is None
assert nullable_datetime.check(datetime.datetime(2017, 9, 1, 23, 59)) == datetime.datetime(2017, 9, 1, 23, 59)
assert nullable_datetime.check('2017-09-01 23:59') == datetime.datetime(2017, 9, 1, 23, 59)
def test_repr(self):
assert repr(DateTime()) == '<DateTime>'
assert repr(DateTime(allow_blank=True)) == '<DateTime(blank)>'
class TestDate:
@pytest.mark.parametrize('value', [
datetime.date(1954, 7, 29),
datetime.datetime(1954, 7, 29, 23, 59),
'1954-07-29',
'29 July 1954',
'29.07.1954',
'29/07/1954',
'07/29/1954',
])
def test_date(self, value):
expected_result = datetime.date(1954, 7, 29)
assert Date()(value) == expected_result
def test_date_blank(self):
check = Date(allow_blank=True)
with pytest.raises(DataError):
check('')
def test_date_parse_failed(self):
check = Date()
with pytest.raises(DataError):
check('29071954')
assert check('290754') != datetime.date(1954, 7, 29)
def test_nullable_date(self):
nullable_date = t.Or(Date, t.Null)
assert nullable_date.check(None) is None
assert nullable_date.check(datetime.date(1954, 7, 29)) == datetime.date(1954, 7, 29)
assert nullable_date.check('1954-07-29') == datetime.date(1954, 7, 29)
def test_repr(self):
assert repr(Date()) == '<Date>'
assert repr(Date(allow_blank=True)) == '<Date(blank)>'
```
#### File: trafaret/tests/test_dataerror.py
```python
import pytest
import trafaret as t
def test_dataerror_value():
error = t.DataError(error='Wait for good value', value='BAD ONE', code='bad_value')
assert error.as_dict() == 'Wait for good value'
assert error.as_dict(value=True) == "Wait for good value, got 'BAD ONE'"
assert error.to_struct() == {
'code': 'bad_value',
'message': 'Wait for good value',
}
def test_nested_dataerror_value():
error = t.DataError(
error={0: t.DataError(error='Wait for good value', value='BAD ONE', code='bad_value')},
code='some_elements_going_mad',
)
assert error.as_dict() == {0: 'Wait for good value'}
assert error.as_dict(value=True) == {0: "Wait for good value, got 'BAD ONE'"}
assert error.to_struct() == {
'code': 'some_elements_going_mad',
'nested': {0: {
'code': 'bad_value',
'message': 'Wait for good value',
}},
}
assert error.to_struct(value=True) == {
'code': 'some_elements_going_mad',
'nested': {0: {
'code': 'bad_value',
'message': "Wait for good value, got 'BAD ONE'",
}},
}
def test_dataerror_wrong_arg():
with pytest.raises(RuntimeError):
t.DataError(123)
def test_repr():
assert repr(t.DataError('error')) == "DataError('error')"
```
#### File: trafaret/trafaret/dataerror.py
```python
from .lib import _empty, STR_TYPES
class DataError(ValueError):
"""
Error with data preserve
error can be a message or None if error raised in childs
data can be anything
"""
__slots__ = ['error', 'name', 'value', 'trafaret', 'code']
error_code = 'unknown'
def __init__(self, error=None, name=None, value=_empty, trafaret=None, code=None):
"""
:attribute error: can be a string or a dict[string, dataerror]
:attribute name:
:attribute value: validated value that leads to this error
:attribute trafaret: trafaret raised error
:attribute code: code for error, like `value_is_too_big`
"""
if not isinstance(error, STR_TYPES + (dict, )):
raise RuntimeError('Only str or dict is supported, got %r' % error)
self.error = error
self.name = name
self.value = value
self.trafaret = trafaret
self.code = code or self.__class__.error_code
# if self.code == 'unknown':
# raise RuntimeError()
def __str__(self, value=False):
if value and self.value != _empty:
return '%s, got %r' % (str(self.error), self.value)
else:
return str(self.error)
def __repr__(self):
return 'DataError(%r)' % str(self)
def to_struct(self, value=False):
if isinstance(self.error, dict):
return {
'code': self.code,
'nested': dict(
(k, v.to_struct(value=value) if isinstance(v, DataError) else v)
for k, v in self.error.items()
),
}
return {
'code': self.code,
'message': self.__str__(value=value),
}
def as_dict(self, value=False):
"""Use `to_struct` if need consistency"""
if not isinstance(self.error, dict):
return self.__str__(value=value)
return dict(
(k, v.as_dict(value=value) if isinstance(v, DataError) else v)
for k, v in self.error.items()
)
```
#### File: trafaret/trafaret/lib.py
```python
import sys
import inspect
try:
from collections.abc import (
Mapping as AbcMapping,
Iterable,
)
except ImportError: # pragma: no cover
from collections import (
Mapping as AbcMapping,
Iterable,
)
py3 = sys.version_info[0] == 3
py36 = sys.version_info >= (3, 6, 0)
if py3:
getargspec = inspect.getfullargspec
STR_TYPES = (str, bytes)
else: # pragma: no cover
getargspec = inspect.getargspec
STR_TYPES = (basestring,) # noqa
_empty = object()
def py3metafix(cls):
if not py3: # pragma: no cover
return cls
else:
newcls = cls.__metaclass__(cls.__name__, (cls,), {})
newcls.__doc__ = cls.__doc__
return newcls
class WithContextCaller(object):
def __init__(self, func):
self.func = func
if hasattr(self.func, 'async_call'):
self.async_call = self.func.async_call
def __call__(self, value, context=None):
return self.func(value, context=context)
class WithoutContextCaller(WithContextCaller):
def __call__(self, value, context=None):
return self.func(value)
def with_context_caller(callble):
if isinstance(callble, WithContextCaller):
return callble
if not inspect.isfunction(callble) and hasattr(callble, '__call__'):
args = getargspec(callble.__call__).args
else:
args = getargspec(callble).args
if 'context' in args:
return WithContextCaller(callble)
else:
return WithoutContextCaller(callble)
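# Hedged example of the dispatch above (functions illustrative):
#   def f(value): return value
#   def g(value, context=None): return (value, context)
#   with_context_caller(f)(1, context='ctx')  # -> 1 (context is dropped)
#   with_context_caller(g)(1, context='ctx')  # -> (1, 'ctx')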
def get_callable_args(fn):
if inspect.isfunction(fn) or inspect.ismethod(fn):
inspectable = fn
elif inspect.isclass(fn):
inspectable = fn.__init__
elif hasattr(fn, '__call__'):
inspectable = fn.__call__
else:
inspectable = fn
try:
spec = getargspec(inspectable)
except TypeError:
return ()
# check if callble is bound method
if hasattr(fn, '__self__'):
spec.args.pop(0) # remove `self` from args
return spec.args
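# Illustrative: for `def f(value, context): ...`, get_callable_args(f) returns ['value', 'context'];
# for a bound method the leading `self` is removed first.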
# __all__ must list names as strings, not the objects themselves
__all__ = (
'AbcMapping',
'Iterable',
'py3',
'py36',
'getargspec',
'STR_TYPES',
'py3metafix',
'WithContextCaller',
'WithoutContextCaller',
'with_context_caller',
'get_callable_args',
)
```
#### File: trafaret/trafaret/numeric.py
```python
import decimal
import numbers
from .base import Trafaret, TrafaretMeta
from .lib import (
py3metafix,
STR_TYPES,
)
from . import codes
class NumberMeta(TrafaretMeta):
"""
Allows slicing syntax for min and max arguments for
number trafarets
>>> Int[1:]
<Int(gte=1)>
>>> Int[1:10]
<Int(gte=1, lte=10)>
>>> Int[:10]
<Int(lte=10)>
>>> Float[1:]
<Float(gte=1)>
>>> Int > 3
<Int(gt=3)>
>>> 1 < (Float < 10)
<Float(gt=1, lt=10)>
>>> (Int > 5).check(10)
10
>>> extract_error(Int > 5, 1)
'value should be greater than 5'
>>> (Int < 3).check(1)
1
>>> extract_error(Int < 3, 3)
'value should be less than 3'
"""
def __getitem__(cls, slice_):
return cls(gte=slice_.start, lte=slice_.stop)
def __lt__(cls, lt):
return cls(lt=lt)
def __le__(cls, lte):
return cls(lte=lte)
def __gt__(cls, gt):
return cls(gt=gt)
def __ge__(cls, gte):
return cls(gte=gte)
@py3metafix
class Float(Trafaret):
"""
Tests that value is a float or a string that is convertable to float.
>>> Float()
<Float>
>>> Float(gte=1)
<Float(gte=1)>
>>> Float(lte=10)
<Float(lte=10)>
>>> Float(gte=1, lte=10)
<Float(gte=1, lte=10)>
>>> Float().check(1.0)
1.0
>>> extract_error(Float(), 1 + 3j)
'value is not float'
>>> extract_error(Float(), 1)
1.0
>>> Float(gte=2).check(3.0)
3.0
>>> extract_error(Float(gte=2), 1.0)
'value is less than 2'
>>> Float(lte=10).check(5.0)
5.0
>>> extract_error(Float(lte=3), 5.0)
'value is greater than 3'
>>> Float().check("5.0")
5.0
"""
__metaclass__ = NumberMeta
convertable = STR_TYPES + (numbers.Real,)
value_type = float
def __init__(self, gte=None, lte=None, gt=None, lt=None):
self.gte = gte
self.lte = lte
self.gt = gt
self.lt = lt
def _converter(self, value):
if not isinstance(value, self.convertable):
self._failure(
'value is not %s' % self.value_type.__name__,
value=value,
code=codes.WRONG_TYPE,
)
try:
return self.value_type(value)
except ValueError:
self._failure(
"value can't be converted to %s" % self.value_type.__name__,
value=value,
code=codes.IS_NOT_A_NUMBER,
)
def _check(self, data):
if not isinstance(data, self.value_type):
value = self._converter(data)
else:
value = data
if self.gte is not None and value < self.gte:
self._failure("value is less than %s" % self.gte, value=data, code=codes.TOO_SMALL)
if self.lte is not None and value > self.lte:
self._failure("value is greater than %s" % self.lte, value=data, code=codes.TOO_BIG)
if self.lt is not None and value >= self.lt:
self._failure("value should be less than %s" % self.lt, value=data, code=codes.TOO_BIG)
if self.gt is not None and value <= self.gt:
self._failure("value should be greater than %s" % self.gt, value=data, code=codes.TOO_SMALL)
return value
def check_and_return(self, data):
self._check(data)
return data
def __lt__(self, lt):
return type(self)(gte=self.gte, lte=self.lte, gt=self.gt, lt=lt)
def __le__(self, lte):
return type(self)(gte=self.gte, lte=lte, gt=self.gt, lt=self.lt)
def __gt__(self, gt):
return type(self)(gte=self.gte, lte=self.lte, gt=gt, lt=self.lt)
def __ge__(self, gte):
return type(self)(gte=gte, lte=self.lte, gt=self.gt, lt=self.lt)
def __repr__(self):
r = "<%s" % type(self).__name__
options = []
for param in ("gte", "lte", "gt", "lt"):
if getattr(self, param) is not None:
options.append("%s=%s" % (param, getattr(self, param)))
if options:
r += "(%s)" % (", ".join(options))
r += ">"
return r
class ToFloat(Float):
"""Checks that value is a float.
Or if value is a string converts this string to float
"""
def check_and_return(self, data):
return self._check(data)
class Int(Float):
"""
>>> Int()
<Int>
>>> Int().check(5)
5
>>> extract_error(Int(), 1.1)
'value is not int'
>>> extract_error(Int(), 1 + 1j)
'value is not int'
"""
value_type = int
def _converter(self, value):
if isinstance(value, float):
if not value.is_integer():
self._failure('value is not int', value=value, code=codes.IS_NOT_INT)
return super(Int, self)._converter(value)
class ToInt(Int):
def check_and_return(self, data):
return self._check(data)
class ToDecimal(Float):
value_type = decimal.Decimal
def check_and_return(self, data):
return self._check(data)
def _converter(self, value):
try:
return self.value_type(value)
except (ValueError, decimal.InvalidOperation):
self._failure(
'value can\'t be converted to Decimal',
value=value,
code=codes.INVALID_DECIMAL,
)
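# Hedged usage sketch (illustrative only, mirroring the doctest examples above):
#   Int[1:10].check(5)         # -> 5, bounds set via NumberMeta slicing
#   (Int > 5).check(10)        # -> 10, comparison operators build constraints
#   ToInt().check("7")         # -> 7, the To* variants return the converted value
#   ToDecimal().check("1.50")  # -> Decimal('1.50')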
``` |
{
"source": "jona-sassenhagen/URIAL",
"score": 3
} |
#### File: jona-sassenhagen/URIAL/plot_rdm.py
```python
def plot_rdm(rdm, mat=0, cmap=None):
    '''Visualize an RDM based on rank-transformed and scaled similarity values
    (only for plotting; the raw/initial values remain unchanged).'''
    from scipy.io.matlab import loadmat
    from nilearn.connectome import sym_matrix_to_vec
    from scipy.stats import rankdata
    from nilearn.connectome import vec_to_sym_matrix
    from sklearn import preprocessing
    import pandas as pd
    import seaborn as sns
    import matplotlib.pyplot as plt
    if cmap is None:
        cmap = 'Spectral_r'
    if mat == 1:
        matfile = loadmat(rdm)
        rdm = matfile['rdm'][0][0]
        # .mat input carries no column labels here, so fall back to index labels
        categories = list(range(rdm.shape[0]))
    else:
        rdm = pd.read_csv(rdm, sep=',')
        if 'Unnamed: 0' in rdm:
            del rdm['Unnamed: 0']
        categories = list(rdm.columns)
        rdm = rdm.to_numpy()  # DataFrame.as_matrix() was removed in newer pandas
    rdm_vec = sym_matrix_to_vec(rdm)
    rdm_vec = rankdata(rdm_vec)
    # scale all ranks jointly into [0, 1] (a single column, not pairs of columns)
    min_max_scaler = preprocessing.MinMaxScaler(feature_range=(0, 1), copy=True)
    rdm_array = rdm_vec.reshape(-1, 1)
    rdm_array = min_max_scaler.fit_transform(rdm_array)
    rdm_array = rdm_array.flatten()
    rdm_rank_scale = vec_to_sym_matrix(rdm_array)
ax = sns.heatmap(rdm_rank_scale, xticklabels=categories, yticklabels=categories, cmap=cmap)
ax.set_yticklabels(categories, rotation=0)
ax.xaxis.tick_top()
ax.set_xticklabels(categories, rotation=90)
ax.collections[0].colorbar.set_label("pairwise similarities (iMDS), rank transformed & scaled [0,1]")
plt.tight_layout()
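# Hedged usage sketch (hypothetical file names, not from the original repo):
#   plot_rdm("sub01_rdm.csv")                  # CSV with labeled columns
#   plot_rdm("sub01_rdm.mat", mat=1)           # MATLAB file with an 'rdm' variable
#   plot_rdm("sub01_rdm.csv", cmap="viridis")  # custom colormap
#   plt.show()                                 # the function only lays out the figure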
``` |
{
"source": "jonasauda/understanding_drone_landing",
"score": 2
} |
#### File: drone_control/drones/CrazyFlie.py
```python
from drones.drone import Drone, State
import json
import socket
import time
import numpy as np
import serial
from scipy.spatial.transform import Rotation as R
import cf_firmware.cflib
from cf_firmware.cflib.crazyflie import Crazyflie
from logger import Logger
from pid import PID
class CrazyFlie(Drone):
def __init__(self, name, type):
super().__init__(name, type)
self.safety_radius = 1000
print("Init CrazyFlie...")
self.crazyflie = Crazyflie()
cf_firmware.cflib.crtp.init_drivers()
self.crazyflie.open_link("radio://0/80/2M")
        time.sleep(1)  # give the radio link a moment to come up
#self.crazyflie.commander.set_client_xmode(True)
self.crazy_ready = True
self.state = State.INITIALIZED
self.cmd_count = 0
'''
self.serial_connection = serial.Serial()
self.serial_connection.baudrate = 115200
self.serial_connection.port = "COM3"
self.serial_connection.open()
'''
print("CrazyFlie init!")
self.start_with_zero()
def steer(self, yaw, pitch, roll, throttle, drone_position):
limit_roll_pitch = 20
gain_roll_pitch = 20
yaw_min = -30
yaw_max = 30
pitch_min = -limit_roll_pitch
pitch_max = limit_roll_pitch
roll_min = -limit_roll_pitch
roll_max = limit_roll_pitch
throttle_min = 0
throttle_max = 65000
# print("pitch:", pitch, "roll:", roll)
yaw = yaw * 90.0
pitch = pitch * gain_roll_pitch
roll = roll * gain_roll_pitch
hover_throttle = 42500.0 # + 900 # 42500 looks good for hovering
throttle = (65000.0 - hover_throttle) * throttle + hover_throttle
yaw = int(np.clip(yaw, a_min=yaw_min, a_max=yaw_max))
pitch = int(np.clip(pitch, a_min=pitch_min, a_max=pitch_max))
roll = int(np.clip(roll, a_min=roll_min, a_max=roll_max))
throttle = int(np.clip(throttle, a_min=throttle_min, a_max=throttle_max))
#
# yaw = 0
# pitch = 0
# yaw = 0
# roll = 0
if drone_position[1] < 250:
roll = 0 # positive: right
pitch = 0 # positive: forward
# throttle = 0
if self.cmd_count % 10 == 0:
if self.target_reached:
# print("CrazyFlie: Disarmed")
self.crazyflie.commander.send_stop_setpoint()
pass
else:
self.crazyflie.commander.send_setpoint(
roll,
pitch,
yaw,
throttle
)
self.cmd_count = 0
#print("CrazyFlie: PID trimmed yaw =", yaw, "pitch =", pitch, "roll =", roll, "throttle =", throttle)
self.cmd_count += 1
time.sleep(0.02)
def start_with_zero(self):
self.crazyflie.commander.send_setpoint(0, 0, 0, 0)
print("init with 0000!")
```
#### File: understanding_drone_landing/drone_control/input_controller.py
```python
from pynput import keyboard
from drones import drone
state = 0
class KeyController():
def __init__(self, drone_controller):
print("starting key control...")
self.drone_controller = drone_controller
try:
self.drone = self.drone_controller.drones["CrazyFlie"]
except KeyError:
print("No CrazyFlie")
try:
self.drone = self.drone_controller.drones["PD"]
except KeyError:
print("No PD")
self.state = 3
self.pid = None
self.tuning_interval = 0.001
def on_press(self, key):
if key == keyboard.Key.esc:
return False # stop listener
try:
k = key.char # single-char keys
        except AttributeError:  # special keys (e.g. arrows) have no .char
k = key.name # other keys
# self.keys.append(k) # store it in global-like variable
print('Key pressed: ' + k)
if k == 'up':
throttle = self.drone.throttle + 10
if throttle > 1900:
throttle = 1900
self.drone.throttle = throttle
if k == 'down':
throttle = self.drone.throttle - 10
if throttle < 900:
throttle = 900
self.drone.throttle = throttle
if k == 'left':
            # fly left
throttle = self.drone.ppm[1]
pitch = self.drone.ppm[3]
roll = self.drone.ppm[0]
self.drone.ppm[2] -= 10
yaw = self.drone.ppm[2]
self.drone.command(throttle, pitch, yaw, roll)
pass
if k == 'right':
            # fly right
throttle = self.drone.ppm[1]
pitch = self.drone.ppm[3]
roll = self.drone.ppm[0]
self.drone.ppm[2] += 10
yaw = self.drone.ppm[2]
#self.drone.command(throttle, pitch, yaw, roll)
pass
if k == 'a':
# increase p
term = self.pid.Kp + self.tuning_interval
self.pid.setKp(term)
drone.save_cfg(self.drone)
print("P:", term)
if k == 'y':
# decrease p
p = self.pid.Kp - self.tuning_interval
self.pid.setKp(p)
drone.save_cfg(self.drone)
print("P:", p)
if k == 's':
# increase i
term = self.pid.Ki + self.tuning_interval
self.pid.setKi(term)
drone.save_cfg(self.drone)
print("I:", term)
if k == 'x':
# decrease i
term = self.pid.Ki - self.tuning_interval
self.pid.setKi(term)
drone.save_cfg(self.drone)
print("I:", term)
if k == 'd':
# increase d
term = self.pid.Kd + self.tuning_interval
self.pid.setKd(term)
drone.save_cfg(self.drone)
print("I:", term)
if k == 'c':
# decrease d
term = self.pid.Kd - self.tuning_interval
self.pid.setKd(term)
drone.save_cfg(self.drone)
print("I:", term)
if k == 'q':
print("Pressing target reached")
self.drone.target_reached = True
if k == 'k':
self.drone.set_armed(not self.drone.armed)
def change_mode(self):
# 0 = pitch
# 1 = yaw
# 2 = roll
# 3 = throttle
self.state = (self.state + 1) % 4
if self.state == 0:
self.pid = self.drone.pid_pitch
print("PID: pitch")
if self.state == 1:
self.pid = self.drone.pid_yaw
print("PID: yaw")
if self.state == 2:
self.pid = self.drone.pid_roll
print("PID: roll")
if self.state == 3:
self.pid = self.drone.pid_throttle
print("PID: throttle")
def on_release(self, key):
pass
def start_key_control(self):
listener = keyboard.Listener(on_press=self.on_press, on_release=self.on_release)
listener.start() # start to listen on a separate thread
``` |
{
"source": "JonasBaeumer/VisualizerBubbleSortPython",
"score": 4
} |
#### File: VisualizerBubbleSortPython/Code/bubblesort.py
```python
def bubble_sort(array):
if array is None:
print("Try again.")
for j in range(len(array), 0, -1):
# Walk through the array from left to right
for i in range(len(array)-1):
# Compare the current element with the next right one
if array[i] > array[i+1]:
#packing and unpacking
array[i], array[i+1] = array[i+1], array[i]
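# Hedged usage sketch (illustrative only):
if __name__ == "__main__":
    data = [5, 2, 9, 1, 7]
    bubble_sort(data)
    print(data)  # -> [1, 2, 5, 7, 9]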
``` |
{
"source": "jonasbfranco/bpytop",
"score": 2
} |
#### File: jonasbfranco/bpytop/bpytop.py
```python
import os, sys, io, threading, signal, re, subprocess, logging, logging.handlers, argparse
import urllib.request
from time import time, sleep, strftime, localtime
from datetime import timedelta
from _thread import interrupt_main
from collections import defaultdict
from select import select
from distutils.util import strtobool
from string import Template
from math import ceil, floor
from random import randint
from shutil import which
from typing import List, Set, Dict, Tuple, Optional, Union, Any, Callable, ContextManager, Iterable, Type, NamedTuple
errors: List[str] = []
try: import fcntl, termios, tty, pwd
except Exception as e: errors.append(f'{e}')
try: import psutil # type: ignore
except Exception as e: errors.append(f'{e}')
SELF_START = time()
SYSTEM: str
if "linux" in sys.platform: SYSTEM = "Linux"
elif "bsd" in sys.platform: SYSTEM = "BSD"
elif "darwin" in sys.platform: SYSTEM = "MacOS"
else: SYSTEM = "Other"
if errors:
print("ERROR!")
print("\n".join(errors))
if SYSTEM == "Other":
print("\nUnsupported platform!\n")
else:
print("\nInstall required modules!\n")
raise SystemExit(1)
VERSION: str = "1.0.63"
#? Argument parser ------------------------------------------------------------------------------->
args = argparse.ArgumentParser()
args.add_argument("-b", "--boxes", action="store", dest="boxes", help = "which boxes to show at start, example: -b \"cpu mem net proc\"")
args.add_argument("-lc", "--low-color", action="store_true", help = "disable truecolor, converts 24-bit colors to 256-color")
args.add_argument("-v", "--version", action="store_true", help = "show version info and exit")
args.add_argument("--debug", action="store_true", help = "start with loglevel set to DEBUG overriding value set in config")
stdargs = args.parse_args()
if stdargs.version:
print(f'bpytop version: {VERSION}\n'
f'psutil version: {".".join(str(x) for x in psutil.version_info)}')
raise SystemExit(0)
ARG_BOXES: str = stdargs.boxes
LOW_COLOR: bool = stdargs.low_color
DEBUG: bool = stdargs.debug
#? Variables ------------------------------------------------------------------------------------->
BANNER_SRC: List[Tuple[str, str, str]] = [
("#ffa50a", "#0fd7ff", "██████╗ ██████╗ ██╗ ██╗████████╗ ██████╗ ██████╗"),
("#f09800", "#00bfe6", "██╔══██╗██╔══██╗╚██╗ ██╔╝╚══██╔══╝██╔═══██╗██╔══██╗"),
("#db8b00", "#00a6c7", "██████╔╝██████╔╝ ╚████╔╝ ██║ ██║ ██║██████╔╝"),
("#c27b00", "#008ca8", "██╔══██╗██╔═══╝ ╚██╔╝ ██║ ██║ ██║██╔═══╝ "),
("#a86b00", "#006e85", "██████╔╝██║ ██║ ██║ ╚██████╔╝██║"),
("#000000", "#000000", "╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝"),
]
#*?This is the template used to create the config file
DEFAULT_CONF: Template = Template(f'#? Config file for bpytop v. {VERSION}' + '''
#* Color theme, looks for a .theme file in "/usr/[local/]share/bpytop/themes" and "~/.config/bpytop/themes", "Default" for builtin default theme.
#* Prefix name by a plus sign (+) for a theme located in user themes folder, i.e. color_theme="+monokai"
color_theme="$color_theme"
#* If the theme set background should be shown, set to False if you want terminal background transparency
theme_background=$theme_background
#* Sets if 24-bit truecolor should be used, will convert 24-bit colors to 256 color (6x6x6 color cube) if false.
truecolor=$truecolor
#* Manually set which boxes to show. Available values are "cpu mem net proc", separate values with whitespace.
shown_boxes="$shown_boxes"
#* Update time in milliseconds, increases automatically if set below the internal loop's processing time, recommended 2000 ms or above for better sample times for graphs.
update_ms=$update_ms
#* Processes update multiplier, sets how often the process list is updated as a multiplier of "update_ms".
#* Set to 2 or higher to greatly decrease bpytop cpu usage. (Only integers)
proc_update_mult=$proc_update_mult
#* Processes sorting, "pid" "program" "arguments" "threads" "user" "memory" "cpu lazy" "cpu responsive",
#* "cpu lazy" updates top process over time, "cpu responsive" updates top process directly.
proc_sorting="$proc_sorting"
#* Reverse sorting order, True or False.
proc_reversed=$proc_reversed
#* Show processes as a tree
proc_tree=$proc_tree
#* Which depth the tree view should auto collapse processes at
tree_depth=$tree_depth
#* Use the cpu graph colors in the process list.
proc_colors=$proc_colors
#* Use a darkening gradient in the process list.
proc_gradient=$proc_gradient
#* If process cpu usage should be of the core it's running on or usage of the total available cpu power.
proc_per_core=$proc_per_core
#* Show process memory as bytes instead of percent
proc_mem_bytes=$proc_mem_bytes
#* Sets the CPU stat shown in upper half of the CPU graph, "total" is always available, see:
#* https://psutil.readthedocs.io/en/latest/#psutil.cpu_times for attributes available on specific platforms.
#* Select from a list of detected attributes from the options menu
cpu_graph_upper="$cpu_graph_upper"
#* Sets the CPU stat shown in lower half of the CPU graph, "total" is always available, see:
#* https://psutil.readthedocs.io/en/latest/#psutil.cpu_times for attributes available on specific platforms.
#* Select from a list of detected attributes from the options menu
cpu_graph_lower="$cpu_graph_lower"
#* Toggles if the lower CPU graph should be inverted.
cpu_invert_lower=$cpu_invert_lower
#* Set to True to completely disable the lower CPU graph.
cpu_single_graph=$cpu_single_graph
#* Shows the system uptime in the CPU box.
show_uptime=$show_uptime
#* Check cpu temperature, needs "osx-cpu-temp" on MacOS X.
check_temp=$check_temp
#* Which sensor to use for cpu temperature, use options menu to select from list of available sensors.
cpu_sensor=$cpu_sensor
#* Show temperatures for cpu cores also if check_temp is True and sensors have been found
show_coretemp=$show_coretemp
#* Which temperature scale to use, available values: "celsius", "fahrenheit", "kelvin" and "rankine"
temp_scale="$temp_scale"
#* Draw a clock at top of screen, formatting according to strftime, empty string to disable.
draw_clock="$draw_clock"
#* Update main ui in background when menus are showing, set this to false if the menus is flickering too much for comfort.
background_update=$background_update
#* Custom cpu model name, empty string to disable.
custom_cpu_name="$custom_cpu_name"
#* Optional filter for shown disks, should be full path of a mountpoint, separate multiple values with a comma ",".
#* Begin line with "exclude=" to change to exclude filter, oterwise defaults to "most include" filter. Example: disks_filter="exclude=/boot, /home/user"
disks_filter="$disks_filter"
#* Show graphs instead of meters for memory values.
mem_graphs=$mem_graphs
#* If swap memory should be shown in memory box.
show_swap=$show_swap
#* Show swap as a disk, ignores show_swap value above, inserts itself after first disk.
swap_disk=$swap_disk
#* If mem box should be split to also show disks info.
show_disks=$show_disks
#* Filter out non physical disks. Set this to False to include network disks, RAM disks and similar.
only_physical=$only_physical
#* Read disks list from /etc/fstab. This also disables only_physical.
use_fstab=$use_fstab
#* Toggles if io stats should be shown in regular disk usage view
show_io_stat=$show_io_stat
#* Toggles io mode for disks, showing only big graphs for disk read/write speeds.
io_mode=$io_mode
#* Set to True to show combined read/write io graphs in io mode.
io_graph_combined=$io_graph_combined
#* Set the top speed for the io graphs in MiB/s (10 by default), use format "device:speed" and separate disks with a comma ",".
#* Example: "/dev/sda:100, /dev/sdb:20"
io_graph_speeds="$io_graph_speeds"
#* Set fixed values for network graphs, default "10M" = 10 mebibytes, possible units "K", "M", "G", append with "bit" for bits instead of bytes, i.e. "100mbit"
net_download="$net_download"
net_upload="$net_upload"
#* Start in network graphs auto rescaling mode, ignores any values set above and rescales down to 10 Kibibytes at the lowest.
net_auto=$net_auto
#* Sync the scaling for download and upload to whichever currently has the highest scale
net_sync=$net_sync
#* If the network graphs color gradient should scale to bandwidth usage or auto scale, bandwidth usage is based on "net_download" and "net_upload" values
net_color_fixed=$net_color_fixed
#* Starts with the Network Interface specified here.
net_iface="$net_iface"
#* Show battery stats in top right if battery is present
show_battery=$show_battery
#* Show init screen at startup, the init screen is purely cosmetic
show_init=$show_init
#* Enable check for new version from github.com/aristocratos/bpytop at start.
update_check=$update_check
#* Set loglevel for "~/.config/bpytop/error.log" levels are: "ERROR" "WARNING" "INFO" "DEBUG".
#* The level set includes all lower levels, i.e. "DEBUG" will show all logging info.
log_level=$log_level
''')
CONFIG_DIR: str = f'{os.path.expanduser("~")}/.config/bpytop'
if not os.path.isdir(CONFIG_DIR):
try:
os.makedirs(CONFIG_DIR)
os.mkdir(f'{CONFIG_DIR}/themes')
except PermissionError:
print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!')
raise SystemExit(1)
CONFIG_FILE: str = f'{CONFIG_DIR}/bpytop.conf'
THEME_DIR: str = ""
if os.path.isdir(f'{os.path.dirname(__file__)}/bpytop-themes'):
THEME_DIR = f'{os.path.dirname(__file__)}/bpytop-themes'
else:
for td in ["/usr/local/", "/usr/", "/snap/bpytop/current/usr/"]:
if os.path.isdir(f'{td}share/bpytop/themes'):
THEME_DIR = f'{td}share/bpytop/themes'
break
USER_THEME_DIR: str = f'{CONFIG_DIR}/themes'
CORES: int = psutil.cpu_count(logical=False) or 1
THREADS: int = psutil.cpu_count(logical=True) or 1
THREAD_ERROR: int = 0
DEFAULT_THEME: Dict[str, str] = {
"main_bg" : "#00",
"main_fg" : "#cc",
"title" : "#ee",
"hi_fg" : "#969696",
"selected_bg" : "#7e2626",
"selected_fg" : "#ee",
"inactive_fg" : "#40",
"graph_text" : "#60",
"meter_bg" : "#40",
"proc_misc" : "#0de756",
"cpu_box" : "#3d7b46",
"mem_box" : "#8a882e",
"net_box" : "#423ba5",
"proc_box" : "#923535",
"div_line" : "#30",
"temp_start" : "#4897d4",
"temp_mid" : "#5474e8",
"temp_end" : "#ff40b6",
"cpu_start" : "#50f095",
"cpu_mid" : "#f2e266",
"cpu_end" : "#fa1e1e",
"free_start" : "#223014",
"free_mid" : "#b5e685",
"free_end" : "#dcff85",
"cached_start" : "#0b1a29",
"cached_mid" : "#74e6fc",
"cached_end" : "#26c5ff",
"available_start" : "#292107",
"available_mid" : "#ffd77a",
"available_end" : "#ffb814",
"used_start" : "#3b1f1c",
"used_mid" : "#d9626d",
"used_end" : "#ff4769",
"download_start" : "#231a63",
"download_mid" : "#4f43a3",
"download_end" : "#b0a9de",
"upload_start" : "#510554",
"upload_mid" : "#7d4180",
"upload_end" : "#dcafde",
"process_start" : "#80d0a3",
"process_mid" : "#dcd179",
"process_end" : "#d45454",
}
MENUS: Dict[str, Dict[str, Tuple[str, ...]]] = {
"options" : {
"normal" : (
"┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐",
"│ │├─┘ │ ││ ││││└─┐",
"└─┘┴ ┴ ┴└─┘┘└┘└─┘"),
"selected" : (
"╔═╗╔═╗╔╦╗╦╔═╗╔╗╔╔═╗",
"║ ║╠═╝ ║ ║║ ║║║║╚═╗",
"╚═╝╩ ╩ ╩╚═╝╝╚╝╚═╝") },
"help" : {
"normal" : (
"┬ ┬┌─┐┬ ┌─┐",
"├─┤├┤ │ ├─┘",
"┴ ┴└─┘┴─┘┴ "),
"selected" : (
"╦ ╦╔═╗╦ ╔═╗",
"╠═╣║╣ ║ ╠═╝",
"╩ ╩╚═╝╩═╝╩ ") },
"quit" : {
"normal" : (
"┌─┐ ┬ ┬ ┬┌┬┐",
"│─┼┐│ │ │ │ ",
"└─┘└└─┘ ┴ ┴ "),
"selected" : (
"╔═╗ ╦ ╦ ╦╔╦╗ ",
"║═╬╗║ ║ ║ ║ ",
"╚═╝╚╚═╝ ╩ ╩ ") }
}
MENU_COLORS: Dict[str, Tuple[str, ...]] = {
"normal" : ("#0fd7ff", "#00bfe6", "#00a6c7", "#008ca8"),
"selected" : ("#ffa50a", "#f09800", "#db8b00", "#c27b00")
}
#? Units for floating_humanizer function
UNITS: Dict[str, Tuple[str, ...]] = {
"bit" : ("bit", "Kib", "Mib", "Gib", "Tib", "Pib", "Eib", "Zib", "Yib", "Bib", "GEb"),
"byte" : ("Byte", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "BiB", "GEB")
}
SUBSCRIPT: Tuple[str, ...] = ("₀", "₁", "₂", "₃", "₄", "₅", "₆", "₇", "₈", "₉")
SUPERSCRIPT: Tuple[str, ...] = ("⁰", "¹", "²", "³", "⁴", "⁵", "⁶", "⁷", "⁸", "⁹")
#? Setup error logger ---------------------------------------------------------------->
try:
errlog = logging.getLogger("ErrorLogger")
errlog.setLevel(logging.DEBUG)
eh = logging.handlers.RotatingFileHandler(f'{CONFIG_DIR}/error.log', maxBytes=1048576, backupCount=4)
eh.setLevel(logging.DEBUG)
eh.setFormatter(logging.Formatter("%(asctime)s | %(levelname)s: %(message)s", datefmt="%d/%m/%y (%X)"))
errlog.addHandler(eh)
except PermissionError:
print(f'ERROR!\nNo permission to write to "{CONFIG_DIR}" directory!')
raise SystemExit(1)
#? Timers for testing and debugging -------------------------------------------------------------->
class TimeIt:
timers: Dict[str, float] = {}
paused: Dict[str, float] = {}
@classmethod
def start(cls, name):
cls.timers[name] = time()
@classmethod
def pause(cls, name):
if name in cls.timers:
cls.paused[name] = time() - cls.timers[name]
del cls.timers[name]
@classmethod
def stop(cls, name):
if name in cls.timers:
total: float = time() - cls.timers[name]
del cls.timers[name]
if name in cls.paused:
total += cls.paused[name]
del cls.paused[name]
errlog.debug(f'{name} completed in {total:.6f} seconds')
def timeit_decorator(func):
def timed(*args, **kw):
ts = time()
out = func(*args, **kw)
errlog.debug(f'{func.__name__} completed in {time() - ts:.6f} seconds')
return out
return timed
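# Hedged usage sketch (illustrative only):
#   TimeIt.start("collect"); ...; TimeIt.stop("collect")  # logs the elapsed time at DEBUG level
#   @timeit_decorator
#   def refresh(): ...                                    # logs each call's duration the same way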
#? Set up config class and load config ----------------------------------------------------------->
class Config:
'''Holds all config variables and functions for loading from and saving to disk'''
keys: List[str] = ["color_theme", "update_ms", "proc_sorting", "proc_reversed", "proc_tree", "check_temp", "draw_clock", "background_update", "custom_cpu_name",
"proc_colors", "proc_gradient", "proc_per_core", "proc_mem_bytes", "disks_filter", "update_check", "log_level", "mem_graphs", "show_swap",
"swap_disk", "show_disks", "use_fstab", "net_download", "net_upload", "net_auto", "net_color_fixed", "show_init", "theme_background",
"net_sync", "show_battery", "tree_depth", "cpu_sensor", "show_coretemp", "proc_update_mult", "shown_boxes", "net_iface", "only_physical",
"truecolor", "io_mode", "io_graph_combined", "io_graph_speeds", "show_io_stat", "cpu_graph_upper", "cpu_graph_lower", "cpu_invert_lower",
"cpu_single_graph", "show_uptime", "temp_scale"]
conf_dict: Dict[str, Union[str, int, bool]] = {}
color_theme: str = "Default"
theme_background: bool = True
truecolor: bool = True
shown_boxes: str = "cpu mem net proc"
update_ms: int = 2000
proc_update_mult: int = 2
proc_sorting: str = "cpu lazy"
proc_reversed: bool = False
proc_tree: bool = False
tree_depth: int = 3
proc_colors: bool = True
proc_gradient: bool = True
proc_per_core: bool = False
proc_mem_bytes: bool = True
cpu_graph_upper: str = "total"
cpu_graph_lower: str = "total"
cpu_invert_lower: bool = True
cpu_single_graph: bool = False
show_uptime: bool = True
check_temp: bool = True
cpu_sensor: str = "Auto"
show_coretemp: bool = True
temp_scale: str = "celsius"
draw_clock: str = "%X"
background_update: bool = True
custom_cpu_name: str = ""
disks_filter: str = ""
update_check: bool = True
mem_graphs: bool = True
show_swap: bool = True
swap_disk: bool = True
show_disks: bool = True
only_physical: bool = True
use_fstab: bool = False
show_io_stat: bool = True
io_mode: bool = False
io_graph_combined: bool = False
io_graph_speeds: str = ""
net_download: str = "10M"
net_upload: str = "10M"
net_color_fixed: bool = False
net_auto: bool = True
net_sync: bool = False
net_iface: str = ""
show_battery: bool = True
show_init: bool = True
log_level: str = "WARNING"
warnings: List[str] = []
info: List[str] = []
sorting_options: List[str] = ["pid", "program", "arguments", "threads", "user", "memory", "cpu lazy", "cpu responsive"]
log_levels: List[str] = ["ERROR", "WARNING", "INFO", "DEBUG"]
cpu_percent_fields: List = ["total"]
cpu_percent_fields.extend(getattr(psutil.cpu_times_percent(), "_fields", []))
temp_scales: List[str] = ["celsius", "fahrenheit", "kelvin", "rankine"]
cpu_sensors: List[str] = [ "Auto" ]
if hasattr(psutil, "sensors_temperatures"):
try:
_temps = psutil.sensors_temperatures()
if _temps:
for _name, _entries in _temps.items():
for _num, _entry in enumerate(_entries, 1):
if hasattr(_entry, "current"):
cpu_sensors.append(f'{_name}:{_num if _entry.label == "" else _entry.label}')
except:
pass
changed: bool = False
recreate: bool = False
config_file: str = ""
_initialized: bool = False
def __init__(self, path: str):
self.config_file = path
conf: Dict[str, Union[str, int, bool]] = self.load_config()
if not "version" in conf.keys():
self.recreate = True
            self.info.append(f'Config file malformed or missing, will be recreated on exit!')
elif conf["version"] != VERSION:
self.recreate = True
            self.info.append(f'Config file version and bpytop version mismatch, will be recreated on exit!')
for key in self.keys:
if key in conf.keys() and conf[key] != "_error_":
setattr(self, key, conf[key])
else:
self.recreate = True
self.conf_dict[key] = getattr(self, key)
self._initialized = True
def __setattr__(self, name, value):
if self._initialized:
object.__setattr__(self, "changed", True)
object.__setattr__(self, name, value)
if name not in ["_initialized", "recreate", "changed"]:
self.conf_dict[name] = value
def load_config(self) -> Dict[str, Union[str, int, bool]]:
'''Load config from file, set correct types for values and return a dict'''
new_config: Dict[str,Union[str, int, bool]] = {}
conf_file: str = ""
if os.path.isfile(self.config_file):
conf_file = self.config_file
elif os.path.isfile("/etc/bpytop.conf"):
conf_file = "/etc/bpytop.conf"
else:
return new_config
try:
with open(conf_file, "r") as f:
for line in f:
line = line.strip()
if line.startswith("#? Config"):
new_config["version"] = line[line.find("v. ") + 3:]
continue
if not '=' in line:
continue
key, line = line.split('=', maxsplit=1)
if not key in self.keys:
continue
line = line.strip('"')
if type(getattr(self, key)) == int:
try:
new_config[key] = int(line)
except ValueError:
self.warnings.append(f'Config key "{key}" should be an integer!')
if type(getattr(self, key)) == bool:
try:
new_config[key] = bool(strtobool(line))
except ValueError:
self.warnings.append(f'Config key "{key}" can only be True or False!')
if type(getattr(self, key)) == str:
new_config[key] = str(line)
except Exception as e:
errlog.exception(str(e))
if "proc_sorting" in new_config and not new_config["proc_sorting"] in self.sorting_options:
new_config["proc_sorting"] = "_error_"
self.warnings.append(f'Config key "proc_sorted" didn\'t get an acceptable value!')
if "log_level" in new_config and not new_config["log_level"] in self.log_levels:
new_config["log_level"] = "_error_"
self.warnings.append(f'Config key "log_level" didn\'t get an acceptable value!')
if "update_ms" in new_config and int(new_config["update_ms"]) < 100:
new_config["update_ms"] = 100
self.warnings.append(f'Config key "update_ms" can\'t be lower than 100!')
for net_name in ["net_download", "net_upload"]:
if net_name in new_config and not new_config[net_name][0].isdigit(): # type: ignore
new_config[net_name] = "_error_"
if "cpu_sensor" in new_config and not new_config["cpu_sensor"] in self.cpu_sensors:
new_config["cpu_sensor"] = "_error_"
self.warnings.append(f'Config key "cpu_sensor" does not contain an available sensor!')
if "shown_boxes" in new_config and not new_config["shown_boxes"] == "":
for box in new_config["shown_boxes"].split(): #type: ignore
if not box in ["cpu", "mem", "net", "proc"]:
new_config["shown_boxes"] = "_error_"
self.warnings.append(f'Config key "shown_boxes" contains invalid box names!')
break
for cpu_graph in ["cpu_graph_upper", "cpu_graph_lower"]:
if cpu_graph in new_config and not new_config[cpu_graph] in self.cpu_percent_fields:
new_config[cpu_graph] = "_error_"
self.warnings.append(f'Config key "{cpu_graph}" does not contain an available cpu stat attribute!')
if "temp_scale" in new_config and not new_config["temp_scale"] in self.temp_scales:
new_config["temp_scale"] = "_error_"
self.warnings.append(f'Config key "temp_scale" does not contain a recognized temperature scale!')
return new_config
def save_config(self):
'''Save current config to config file if difference in values or version, creates a new file if not found'''
if not self.changed and not self.recreate: return
try:
with open(self.config_file, "w" if os.path.isfile(self.config_file) else "x") as f:
f.write(DEFAULT_CONF.substitute(self.conf_dict))
except Exception as e:
errlog.exception(str(e))
try:
CONFIG: Config = Config(CONFIG_FILE)
if DEBUG:
errlog.setLevel(logging.DEBUG)
else:
errlog.setLevel(getattr(logging, CONFIG.log_level))
DEBUG = CONFIG.log_level == "DEBUG"
errlog.info(f'New instance of bpytop version {VERSION} started with pid {os.getpid()}')
errlog.info(f'Loglevel set to {"DEBUG" if DEBUG else CONFIG.log_level}')
errlog.debug(f'Using psutil version {".".join(str(x) for x in psutil.version_info)}')
errlog.debug(f'CMD: {" ".join(sys.argv)}')
if CONFIG.info:
for info in CONFIG.info:
errlog.info(info)
CONFIG.info = []
if CONFIG.warnings:
for warning in CONFIG.warnings:
errlog.warning(warning)
CONFIG.warnings = []
except Exception as e:
errlog.exception(f'{e}')
raise SystemExit(1)
if ARG_BOXES:
_new_boxes: List = []
for _box in ARG_BOXES.split():
if _box in ["cpu", "mem", "net", "proc"]:
_new_boxes.append(_box)
CONFIG.shown_boxes = " ".join(_new_boxes)
del _box, _new_boxes
if SYSTEM == "Linux" and not os.path.isdir("/sys/class/power_supply"):
CONFIG.show_battery = False
if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7):
warn = f'psutil version {".".join(str(x) for x in psutil.version_info)} detected, version 5.7.0 or later required for full functionality!'
print("WARNING!", warn)
errlog.warning(warn)
#? Classes --------------------------------------------------------------------------------------->
class Term:
"""Terminal info and commands"""
width: int = 0
height: int = 0
resized: bool = False
_w : int = 0
_h : int = 0
fg: str = "" #* Default foreground color
bg: str = "" #* Default background color
hide_cursor = "\033[?25l" #* Hide terminal cursor
show_cursor = "\033[?25h" #* Show terminal cursor
alt_screen = "\033[?1049h" #* Switch to alternate screen
normal_screen = "\033[?1049l" #* Switch to normal screen
clear = "\033[2J\033[0;0f" #* Clear screen and set cursor to position 0,0
mouse_on = "\033[?1002h\033[?1015h\033[?1006h" #* Enable reporting of mouse position on click and release
mouse_off = "\033[?1002l" #* Disable mouse reporting
mouse_direct_on = "\033[?1003h" #* Enable reporting of mouse position at any movement
mouse_direct_off = "\033[?1003l" #* Disable direct mouse reporting
winch = threading.Event()
old_boxes: List = []
min_width: int = 0
min_height: int = 0
@classmethod
def refresh(cls, *args, force: bool = False):
"""Update width, height and set resized flag if terminal has been resized"""
if Init.running: cls.resized = False; return
if cls.resized: cls.winch.set(); return
cls._w, cls._h = os.get_terminal_size()
if (cls._w, cls._h) == (cls.width, cls.height) and cls.old_boxes == Box.boxes and not force: return
if force: Collector.collect_interrupt = True
if cls.old_boxes != Box.boxes:
w_p = h_p = 0
cls.min_width = cls.min_height = 0
cls.old_boxes = Box.boxes.copy()
for box_class in Box.__subclasses__():
for box_name in Box.boxes:
if box_name in str(box_class).capitalize():
if not (box_name == "cpu" and "proc" in Box.boxes) and not (box_name == "net" and "mem" in Box.boxes) and w_p + box_class.width_p <= 100:
w_p += box_class.width_p
cls.min_width += getattr(box_class, "min_w", 0)
if not (box_name in ["mem", "net"] and "proc" in Box.boxes) and h_p + box_class.height_p <= 100:
h_p += box_class.height_p
cls.min_height += getattr(box_class, "min_h", 0)
while (cls._w, cls._h) != (cls.width, cls.height) or (cls._w < cls.min_width or cls._h < cls.min_height):
if Init.running: Init.resized = True
CpuBox.clock_block = True
cls.resized = True
Collector.collect_interrupt = True
cls.width, cls.height = cls._w, cls._h
Draw.now(Term.clear)
box_width = min(50, cls._w - 2)
Draw.now(f'{create_box(cls._w // 2 - box_width // 2, cls._h // 2 - 2, 50, 3, "resizing", line_color=Colors.green, title_color=Colors.white)}',
f'{Mv.r(box_width // 4)}{Colors.default}{Colors.black_bg}{Fx.b}Width : {cls._w} Height: {cls._h}{Fx.ub}{Term.bg}{Term.fg}')
if cls._w < 80 or cls._h < 24:
while cls._w < cls.min_width or cls._h < cls.min_height:
Draw.now(Term.clear)
box_width = min(50, cls._w - 2)
Draw.now(f'{create_box(cls._w // 2 - box_width // 2, cls._h // 2 - 2, box_width, 4, "warning", line_color=Colors.red, title_color=Colors.white)}',
f'{Mv.r(box_width // 4)}{Colors.default}{Colors.black_bg}{Fx.b}Width: {Colors.red if cls._w < cls.min_width else Colors.green}{cls._w} ',
f'{Colors.default}Height: {Colors.red if cls._h < cls.min_height else Colors.green}{cls._h}{Term.bg}{Term.fg}',
                        f'{Mv.d(1)}{Mv.l(25)}{Colors.default}{Colors.black_bg}Current config needs: {cls.min_width} x {cls.min_height}{Fx.ub}{Term.bg}{Term.fg}')
cls.winch.wait(0.3)
cls.winch.clear()
cls._w, cls._h = os.get_terminal_size()
else:
cls.winch.wait(0.3)
cls.winch.clear()
cls._w, cls._h = os.get_terminal_size()
Key.mouse = {}
Box.calc_sizes()
Collector.proc_counter = 1
if Menu.active: Menu.resized = True
Box.draw_bg(now=False)
cls.resized = False
Timer.finish()
@staticmethod
def echo(on: bool):
"""Toggle input echo"""
(iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr(sys.stdin.fileno())
if on:
lflag |= termios.ECHO # type: ignore
else:
lflag &= ~termios.ECHO # type: ignore
new_attr = [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, new_attr)
@staticmethod
def title(text: str = "") -> str:
out: str = f'{os.environ.get("TERMINAL_TITLE", "")}'
if out and text: out += " "
if text: out += f'{text}'
return f'\033]0;{out}\a'
class Fx:
"""Text effects
    * trans(string: str): Replaces whitespace with an escape "move right" so the background behind it is not overwritten.
    * uncolor(string: str): Removes all 24-bit color escape sequences and returns the string."""
start = "\033[" #* Escape sequence start
sep = ";" #* Escape sequence separator
end = "m" #* Escape sequence end
reset = rs = "\033[0m" #* Reset foreground/background color and text effects
bold = b = "\033[1m" #* Bold on
unbold = ub = "\033[22m" #* Bold off
dark = d = "\033[2m" #* Dark on
undark = ud = "\033[22m" #* Dark off
italic = i = "\033[3m" #* Italic on
unitalic = ui = "\033[23m" #* Italic off
underline = u = "\033[4m" #* Underline on
ununderline = uu = "\033[24m" #* Underline off
blink = bl = "\033[5m" #* Blink on
unblink = ubl = "\033[25m" #* Blink off
strike = s = "\033[9m" #* Strike / crossed-out on
unstrike = us = "\033[29m" #* Strike / crossed-out off
#* Precompiled regex for finding a 24-bit color escape sequence in a string
color_re = re.compile(r"\033\[\d+;\d?;?\d*;?\d*;?\d*m")
@staticmethod
def trans(string: str):
return string.replace(" ", "\033[1C")
@classmethod
def uncolor(cls, string: str) -> str:
return f'{cls.color_re.sub("", string)}'
class Raw(object):
"""Set raw input mode for device"""
def __init__(self, stream):
self.stream = stream
self.fd = self.stream.fileno()
def __enter__(self):
self.original_stty = termios.tcgetattr(self.stream)
tty.setcbreak(self.stream)
def __exit__(self, type, value, traceback):
termios.tcsetattr(self.stream, termios.TCSANOW, self.original_stty)
class Nonblocking(object):
"""Set nonblocking mode for device"""
def __init__(self, stream):
self.stream = stream
self.fd = self.stream.fileno()
def __enter__(self):
self.orig_fl = fcntl.fcntl(self.fd, fcntl.F_GETFL)
fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl | os.O_NONBLOCK)
def __exit__(self, *args):
fcntl.fcntl(self.fd, fcntl.F_SETFL, self.orig_fl)
class Mv:
"""Class with collection of cursor movement functions: .t[o](line, column) | .r[ight](columns) | .l[eft](columns) | .u[p](lines) | .d[own](lines) | .save() | .restore()"""
@staticmethod
def to(line: int, col: int) -> str:
return f'\033[{line};{col}f' #* Move cursor to line, column
@staticmethod
def right(x: int) -> str: #* Move cursor right x columns
return f'\033[{x}C'
@staticmethod
def left(x: int) -> str: #* Move cursor left x columns
return f'\033[{x}D'
@staticmethod
def up(x: int) -> str: #* Move cursor up x lines
return f'\033[{x}A'
@staticmethod
def down(x: int) -> str: #* Move cursor down x lines
return f'\033[{x}B'
save: str = "\033[s" #* Save cursor position
    restore: str = "\033[u" #* Restore saved cursor position
t = to
r = right
l = left
u = up
d = down
class Key:
"""Handles the threaded input reader for keypresses and mouse events"""
list: List[str] = []
mouse: Dict[str, List[List[int]]] = {}
mouse_pos: Tuple[int, int] = (0, 0)
escape: Dict[Union[str, Tuple[str, str]], str] = {
"\n" : "enter",
("\x7f", "\x08") : "backspace",
("[A", "OA") : "up",
("[B", "OB") : "down",
("[D", "OD") : "left",
("[C", "OC") : "right",
"[2~" : "insert",
"[3~" : "delete",
"[H" : "home",
"[F" : "end",
"[5~" : "page_up",
"[6~" : "page_down",
"\t" : "tab",
"[Z" : "shift_tab",
"OP" : "f1",
"OQ" : "f2",
"OR" : "f3",
"OS" : "f4",
"[15" : "f5",
"[17" : "f6",
"[18" : "f7",
"[19" : "f8",
"[20" : "f9",
"[21" : "f10",
"[23" : "f11",
"[24" : "f12"
}
new = threading.Event()
idle = threading.Event()
mouse_move = threading.Event()
mouse_report: bool = False
idle.set()
stopping: bool = False
started: bool = False
reader: threading.Thread
@classmethod
def start(cls):
cls.stopping = False
cls.reader = threading.Thread(target=cls._get_key)
cls.reader.start()
cls.started = True
@classmethod
def stop(cls):
if cls.started and cls.reader.is_alive():
cls.stopping = True
try:
cls.reader.join()
except:
pass
@classmethod
def last(cls) -> str:
if cls.list: return cls.list.pop()
else: return ""
@classmethod
def get(cls) -> str:
if cls.list: return cls.list.pop(0)
else: return ""
@classmethod
def get_mouse(cls) -> Tuple[int, int]:
if cls.new.is_set():
cls.new.clear()
return cls.mouse_pos
@classmethod
def mouse_moved(cls) -> bool:
if cls.mouse_move.is_set():
cls.mouse_move.clear()
return True
else:
return False
@classmethod
def has_key(cls) -> bool:
return bool(cls.list)
@classmethod
def clear(cls):
cls.list = []
@classmethod
def input_wait(cls, sec: float = 0.0, mouse: bool = False) -> bool:
'''Returns True if key is detected else waits out timer and returns False'''
if cls.list: return True
if mouse: Draw.now(Term.mouse_direct_on)
cls.new.wait(sec if sec > 0 else 0.0)
if mouse: Draw.now(Term.mouse_direct_off, Term.mouse_on)
if cls.new.is_set():
cls.new.clear()
return True
else:
return False
@classmethod
def break_wait(cls):
cls.list.append("_null")
cls.new.set()
sleep(0.01)
cls.new.clear()
@classmethod
def _get_key(cls):
"""Get a key or escape sequence from stdin, convert to readable format and save to keys list. Meant to be run in it's own thread."""
input_key: str = ""
clean_key: str = ""
try:
while not cls.stopping:
with Raw(sys.stdin):
if not select([sys.stdin], [], [], 0.1)[0]: #* Wait 100ms for input on stdin then restart loop to check for stop flag
continue
input_key += sys.stdin.read(1) #* Read 1 key safely with blocking on
if input_key == "\033": #* If first character is a escape sequence keep reading
cls.idle.clear() #* Report IO block in progress to prevent Draw functions from getting a IO Block error
Draw.idle.wait() #* Wait for Draw function to finish if busy
with Nonblocking(sys.stdin): #* Set non blocking to prevent read stall
input_key += sys.stdin.read(20)
if input_key.startswith("\033[<"):
_ = sys.stdin.read(1000)
cls.idle.set() #* Report IO blocking done
#errlog.debug(f'{repr(input_key)}')
if input_key == "\033": clean_key = "escape" #* Key is "escape" key if only containing \033
elif input_key.startswith(("\033[<0;", "\033[<35;", "\033[<64;", "\033[<65;")): #* Detected mouse event
try:
cls.mouse_pos = (int(input_key.split(";")[1]), int(input_key.split(";")[2].rstrip("mM")))
except:
pass
else:
if input_key.startswith("\033[<35;"): #* Detected mouse move in mouse direct mode
cls.mouse_move.set()
cls.new.set()
elif input_key.startswith("\033[<64;"): #* Detected mouse scroll up
clean_key = "mouse_scroll_up"
elif input_key.startswith("\033[<65;"): #* Detected mouse scroll down
clean_key = "mouse_scroll_down"
elif input_key.startswith("\033[<0;") and input_key.endswith("m"): #* Detected mouse click release
if Menu.active:
clean_key = "mouse_click"
else:
for key_name, positions in cls.mouse.items(): #* Check if mouse position is clickable
if list(cls.mouse_pos) in positions:
clean_key = key_name
break
else:
clean_key = "mouse_click"
elif input_key == "\\": clean_key = "\\" #* Clean up "\" to not return escaped
else:
                        for code in cls.escape.keys(): #* Go through the dict of escape codes to get the cleaned key name
if input_key.lstrip("\033").startswith(code):
clean_key = cls.escape[code]
break
else: #* If not found in escape dict and length of key is 1, assume regular character
if len(input_key) == 1:
clean_key = input_key
if clean_key:
cls.list.append(clean_key) #* Store up to 10 keys in input queue for later processing
if len(cls.list) > 10: del cls.list[0]
clean_key = ""
cls.new.set() #* Set threading event to interrupt main thread sleep
input_key = ""
except Exception as e:
errlog.exception(f'Input thread failed with exception: {e}')
cls.idle.set()
cls.list.clear()
clean_quit(1, thread=True)
class Draw:
'''Holds the draw buffer and manages IO blocking queue
* .buffer([+]name[!], *args, append=False, now=False, z=100) : Add *args to buffer
* - Adding "+" prefix to name sets append to True and appends to name's current string
* - Adding "!" suffix to name sets now to True and print name's current string
* .out(clear=False) : Print all strings in buffer, clear=True clear all buffers after
* .now(*args) : Prints all arguments as a string
* .clear(*names) : Clear named buffers, all if no argument
    * .saved_buffer() : Returns a string of all saved buffers
'''
strings: Dict[str, str] = {}
z_order: Dict[str, int] = {}
saved: Dict[str, str] = {}
save: Dict[str, bool] = {}
once: Dict[str, bool] = {}
idle = threading.Event()
idle.set()
@classmethod
def now(cls, *args):
'''Wait for input reader and self to be idle then print to screen'''
Key.idle.wait()
cls.idle.wait()
cls.idle.clear()
try:
print(*args, sep="", end="", flush=True)
        except BlockingIOError:
            #* Retry the print once the input reader has released stdio
            Key.idle.wait()
            print(*args, sep="", end="", flush=True)
cls.idle.set()
@classmethod
def buffer(cls, name: str, *args: str, append: bool = False, now: bool = False, z: int = 100, only_save: bool = False, no_save: bool = False, once: bool = False):
string: str = ""
if name.startswith("+"):
name = name.lstrip("+")
append = True
if name.endswith("!"):
name = name.rstrip("!")
now = True
cls.save[name] = not no_save
cls.once[name] = once
if not name in cls.z_order or z != 100: cls.z_order[name] = z
if args: string = "".join(args)
if only_save:
if name not in cls.saved or not append: cls.saved[name] = ""
cls.saved[name] += string
else:
if name not in cls.strings or not append: cls.strings[name] = ""
cls.strings[name] += string
if now:
cls.out(name)
@classmethod
def out(cls, *names: str, clear = False):
out: str = ""
if not cls.strings: return
if names:
for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
if name in names and name in cls.strings:
out += cls.strings[name]
if cls.save[name]:
cls.saved[name] = cls.strings[name]
if clear or cls.once[name]:
cls.clear(name)
cls.now(out)
else:
for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
if name in cls.strings:
out += cls.strings[name]
if cls.save[name]:
cls.saved[name] = cls.strings[name]
if cls.once[name] and not clear:
cls.clear(name)
if clear:
cls.clear()
cls.now(out)
@classmethod
def saved_buffer(cls) -> str:
out: str = ""
for name in sorted(cls.z_order, key=cls.z_order.get, reverse=True): #type: ignore
if name in cls.saved:
out += cls.saved[name]
return out
@classmethod
def clear(cls, *names, saved: bool = False):
if names:
for name in names:
if name in cls.strings:
del cls.strings[name]
if name in cls.save:
del cls.save[name]
if name in cls.once:
del cls.once[name]
if saved:
if name in cls.saved:
del cls.saved[name]
if name in cls.z_order:
del cls.z_order[name]
else:
cls.strings = {}
cls.save = {}
cls.once = {}
if saved:
cls.saved = {}
cls.z_order = {}
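# Hedged usage sketch for Draw (illustrative buffer names):
#   Draw.buffer("clock", Mv.to(1, 1), "12:00")  # stage a named string at default z=100
#   Draw.buffer("+clock!", " (paused)")         # "+" appends, "!" prints immediately
#   Draw.out(clear=True)                        # print all buffers by z-order, then clear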
class Color:
'''Holds representations for a 24-bit color value
__init__(color, depth="fg", default=False)
-- color accepts 6 digit hexadecimal: string "#RRGGBB", 2 digit hexadecimal: string "#FF" or decimal RGB "255 255 255" as a string.
-- depth accepts "fg" or "bg"
__call__(*args) joins str arguments to a string and apply color
__str__ returns escape sequence to set color
__iter__ returns iteration over red, green and blue in integer values of 0-255.
* Values: .hexa: str | .dec: Tuple[int, int, int] | .red: int | .green: int | .blue: int | .depth: str | .escape: str
'''
hexa: str; dec: Tuple[int, int, int]; red: int; green: int; blue: int; depth: str; escape: str; default: bool
def __init__(self, color: str, depth: str = "fg", default: bool = False):
self.depth = depth
self.default = default
try:
if not color:
self.dec = (-1, -1, -1)
self.hexa = ""
self.red = self.green = self.blue = -1
self.escape = "\033[49m" if depth == "bg" and default else ""
return
elif color.startswith("#"):
self.hexa = color
if len(self.hexa) == 3:
self.hexa += self.hexa[1:3] + self.hexa[1:3]
c = int(self.hexa[1:3], base=16)
self.dec = (c, c, c)
elif len(self.hexa) == 7:
self.dec = (int(self.hexa[1:3], base=16), int(self.hexa[3:5], base=16), int(self.hexa[5:7], base=16))
else:
raise ValueError(f'Incorrectly formatted hexadecimal rgb string: {self.hexa}')
else:
c_t = tuple(map(int, color.split(" ")))
if len(c_t) == 3:
self.dec = c_t #type: ignore
else:
raise ValueError(f'RGB dec should be "0-255 0-255 0-255"')
ct = self.dec[0] + self.dec[1] + self.dec[2]
if ct > 255*3 or ct < 0:
raise ValueError(f'RGB values out of range: {color}')
except Exception as e:
errlog.exception(str(e))
self.escape = ""
return
if self.dec and not self.hexa: self.hexa = f'{hex(self.dec[0]).lstrip("0x").zfill(2)}{hex(self.dec[1]).lstrip("0x").zfill(2)}{hex(self.dec[2]).lstrip("0x").zfill(2)}'
if self.dec and self.hexa:
self.red, self.green, self.blue = self.dec
self.escape = f'\033[{38 if self.depth == "fg" else 48};2;{";".join(str(c) for c in self.dec)}m'
if not CONFIG.truecolor or LOW_COLOR:
self.escape = f'{self.truecolor_to_256(rgb=self.dec, depth=self.depth)}'
def __str__(self) -> str:
return self.escape
def __repr__(self) -> str:
return repr(self.escape)
def __iter__(self) -> Iterable:
for c in self.dec: yield c
def __call__(self, *args: str) -> str:
if len(args) < 1: return ""
return f'{self.escape}{"".join(args)}{getattr(Term, self.depth)}'
@staticmethod
def truecolor_to_256(rgb: Tuple[int, int, int], depth: str="fg") -> str:
out: str = ""
pre: str = f'\033[{"38" if depth == "fg" else "48"};5;'
greyscale: Tuple[int, int, int] = ( rgb[0] // 11, rgb[1] // 11, rgb[2] // 11 )
if greyscale[0] == greyscale[1] == greyscale[2]:
out = f'{pre}{232 + greyscale[0]}m'
else:
out = f'{pre}{round(rgb[0] / 51) * 36 + round(rgb[1] / 51) * 6 + round(rgb[2] / 51) + 16}m'
return out
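    # Example mapping (illustrative): rgb=(255, 0, 0), depth="fg" -> "\033[38;5;196m"
    # (cube index 36*5 + 6*0 + 0 + 16 = 196); near-greys like (128, 128, 128) land
    # in the 232-255 greyscale ramp, here "\033[38;5;243m".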
@staticmethod
def escape_color(hexa: str = "", r: int = 0, g: int = 0, b: int = 0, depth: str = "fg") -> str:
"""Returns escape sequence to set color
* accepts either 6 digit hexadecimal hexa="#RRGGBB", 2 digit hexadecimal: hexa="#FF"
* or decimal RGB: r=0-255, g=0-255, b=0-255
* depth="fg" or "bg"
"""
dint: int = 38 if depth == "fg" else 48
color: str = ""
if hexa:
try:
if len(hexa) == 3:
c = int(hexa[1:], base=16)
if CONFIG.truecolor and not LOW_COLOR:
color = f'\033[{dint};2;{c};{c};{c}m'
else:
color = f'{Color.truecolor_to_256(rgb=(c, c, c), depth=depth)}'
elif len(hexa) == 7:
if CONFIG.truecolor and not LOW_COLOR:
color = f'\033[{dint};2;{int(hexa[1:3], base=16)};{int(hexa[3:5], base=16)};{int(hexa[5:7], base=16)}m'
else:
color = f'{Color.truecolor_to_256(rgb=(int(hexa[1:3], base=16), int(hexa[3:5], base=16), int(hexa[5:7], base=16)), depth=depth)}'
except ValueError as e:
errlog.exception(f'{e}')
else:
if CONFIG.truecolor and not LOW_COLOR:
color = f'\033[{dint};2;{r};{g};{b}m'
else:
color = f'{Color.truecolor_to_256(rgb=(r, g, b), depth=depth)}'
return color
@classmethod
def fg(cls, *args) -> str:
if len(args) > 2: return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="fg")
else: return cls.escape_color(hexa=args[0], depth="fg")
@classmethod
def bg(cls, *args) -> str:
if len(args) > 2: return cls.escape_color(r=args[0], g=args[1], b=args[2], depth="bg")
else: return cls.escape_color(hexa=args[0], depth="bg")
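# Hedged usage sketch for Color (illustrative only):
#   warn = Color("#ff0000")   # 24-bit foreground color from hex
#   print(warn("alert"))      # wraps the text in the escape sequence, then resets
#   r, g, b = warn            # iterates the decimal components (255, 0, 0)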
class Colors:
'''Standard colors for menus and dialogs'''
default = Color("#cc")
white = Color("#ff")
red = Color("#bf3636")
green = Color("#68bf36")
blue = Color("#0fd7ff")
yellow = Color("#db8b00")
black_bg = Color("#00", depth="bg")
null = Color("")
class Theme:
'''__init__ accepts a dict containing { "color_element" : "color" }'''
themes: Dict[str, str] = {}
cached: Dict[str, Dict[str, str]] = { "Default" : DEFAULT_THEME }
current: str = ""
main_bg = main_fg = title = hi_fg = selected_bg = selected_fg = inactive_fg = proc_misc = cpu_box = mem_box = net_box = proc_box = div_line = temp_start = temp_mid = temp_end = cpu_start = cpu_mid = cpu_end = free_start = free_mid = free_end = cached_start = cached_mid = cached_end = available_start = available_mid = available_end = used_start = used_mid = used_end = download_start = download_mid = download_end = upload_start = upload_mid = upload_end = graph_text = meter_bg = process_start = process_mid = process_end = Colors.default
gradient: Dict[str, List[str]] = {
"temp" : [],
"cpu" : [],
"free" : [],
"cached" : [],
"available" : [],
"used" : [],
"download" : [],
"upload" : [],
"proc" : [],
"proc_color" : [],
"process" : [],
}
def __init__(self, theme: str):
self.refresh()
self._load_theme(theme)
def __call__(self, theme: str):
for k in self.gradient.keys(): self.gradient[k] = []
self._load_theme(theme)
def _load_theme(self, theme: str):
tdict: Dict[str, str]
if theme in self.cached:
tdict = self.cached[theme]
elif theme in self.themes:
tdict = self._load_file(self.themes[theme])
self.cached[theme] = tdict
else:
errlog.warning(f'No theme named "{theme}" found!')
theme = "Default"
CONFIG.color_theme = theme
tdict = DEFAULT_THEME
self.current = theme
#if CONFIG.color_theme != theme: CONFIG.color_theme = theme
if not "graph_text" in tdict and "inactive_fg" in tdict:
tdict["graph_text"] = tdict["inactive_fg"]
if not "meter_bg" in tdict and "inactive_fg" in tdict:
tdict["meter_bg"] = tdict["inactive_fg"]
if not "process_start" in tdict and "cpu_start" in tdict:
tdict["process_start"] = tdict["cpu_start"]
tdict["process_mid"] = tdict.get("cpu_mid", "")
tdict["process_end"] = tdict.get("cpu_end", "")
#* Get key names from DEFAULT_THEME dict to not leave any color unset if missing from theme dict
for item, value in DEFAULT_THEME.items():
default = item in ["main_fg", "main_bg"]
depth = "bg" if item in ["main_bg", "selected_bg"] else "fg"
if item in tdict:
setattr(self, item, Color(tdict[item], depth=depth, default=default))
else:
setattr(self, item, Color(value, depth=depth, default=default))
#* Create color gradients from one, two or three colors, 101 values indexed 0-100
self.proc_start, self.proc_mid, self.proc_end = self.main_fg, Colors.null, self.inactive_fg
self.proc_color_start, self.proc_color_mid, self.proc_color_end = self.inactive_fg, Colors.null, self.process_start
rgb: Dict[str, Tuple[int, int, int]]
colors: List[List[int]] = []
for name in self.gradient:
rgb = { "start" : getattr(self, f'{name}_start').dec, "mid" : getattr(self, f'{name}_mid').dec, "end" : getattr(self, f'{name}_end').dec }
colors = [ list(getattr(self, f'{name}_start')) ]
if rgb["end"][0] >= 0:
r = 50 if rgb["mid"][0] >= 0 else 100
for first, second in ["start", "mid" if r == 50 else "end"], ["mid", "end"]:
for i in range(r):
colors += [[rgb[first][n] + i * (rgb[second][n] - rgb[first][n]) // r for n in range(3)]]
if r == 100:
break
self.gradient[name] += [ Color.fg(*color) for color in colors ]
else:
c = Color.fg(*rgb["start"])
self.gradient[name] += [c] * 101
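        # e.g. a "#000000" start and "#ffffff" end with no mid color yields 101 evenly
        # spaced grey steps; defining a mid color splits the ramp at index 50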
#* Set terminal colors
Term.fg = f'{self.main_fg}'
Term.bg = f'{self.main_bg}' if CONFIG.theme_background else "\033[49m"
Draw.now(self.main_fg, self.main_bg)
@classmethod
def refresh(cls):
'''Sets themes dict with names and paths to all found themes'''
cls.themes = { "Default" : "Default" }
try:
for d in (THEME_DIR, USER_THEME_DIR):
if not d: continue
for f in os.listdir(d):
if f.endswith(".theme"):
cls.themes[f'{"" if d == THEME_DIR else "+"}{f[:-6]}'] = f'{d}/{f}'
except Exception as e:
errlog.exception(str(e))
@staticmethod
def _load_file(path: str) -> Dict[str, str]:
'''Load a bashtop formatted theme file and return a dict'''
new_theme: Dict[str, str] = {}
try:
with open(path, "r") as f:
for line in f:
if not line.startswith("theme["): continue
key = line[6:line.find("]")]
s = line.find('"')
value = line[s + 1:line.find('"', s + 1)]
new_theme[key] = value
except Exception as e:
errlog.exception(str(e))
return new_theme
class Banner:
'''Holds the bpytop banner, .draw(line, [col=0], [center=False], [now=False])'''
out: List[str] = []
c_color: str = ""
length: int = 0
if not out:
for num, (color, color2, line) in enumerate(BANNER_SRC):
if len(line) > length: length = len(line)
out_var = ""
line_color = Color.fg(color)
line_color2 = Color.fg(color2)
line_dark = Color.fg(f'#{80 - num * 6}')
for n, letter in enumerate(line):
if letter == "█" and c_color != line_color:
if 5 < n < 25: c_color = line_color2
else: c_color = line_color
out_var += c_color
elif letter == " ":
letter = f'{Mv.r(1)}'
c_color = ""
elif letter != "█" and c_color != line_dark:
c_color = line_dark
out_var += line_dark
out_var += letter
out.append(out_var)
@classmethod
def draw(cls, line: int, col: int = 0, center: bool = False, now: bool = False):
out: str = ""
if center: col = Term.width // 2 - cls.length // 2
for n, o in enumerate(cls.out):
out += f'{Mv.to(line + n, col)}{o}'
out += f'{Term.fg}'
if now: Draw.out(out)
else: return out
class Symbol:
h_line: str = "─"
v_line: str = "│"
left_up: str = "┌"
right_up: str = "┐"
left_down: str = "└"
right_down: str = "┘"
title_left: str = "┤"
title_right: str = "├"
div_up: str = "┬"
div_down: str = "┴"
graph_up: Dict[float, str] = {
0.0 : " ", 0.1 : "⢀", 0.2 : "⢠", 0.3 : "⢰", 0.4 : "⢸",
1.0 : "⡀", 1.1 : "⣀", 1.2 : "⣠", 1.3 : "⣰", 1.4 : "⣸",
2.0 : "⡄", 2.1 : "⣄", 2.2 : "⣤", 2.3 : "⣴", 2.4 : "⣼",
3.0 : "⡆", 3.1 : "⣆", 3.2 : "⣦", 3.3 : "⣶", 3.4 : "⣾",
4.0 : "⡇", 4.1 : "⣇", 4.2 : "⣧", 4.3 : "⣷", 4.4 : "⣿"
}
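    # Keys encode quarter-block heights as "left.right" (0-4 each); e.g. 2.3 picks
    # the braille cell whose left half is filled to 2/4 and right half to 3/4.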
graph_up_small = graph_up.copy()
graph_up_small[0.0] = "\033[1C"
graph_down: Dict[float, str] = {
0.0 : " ", 0.1 : "⠈", 0.2 : "⠘", 0.3 : "⠸", 0.4 : "⢸",
1.0 : "⠁", 1.1 : "⠉", 1.2 : "⠙", 1.3 : "⠹", 1.4 : "⢹",
2.0 : "⠃", 2.1 : "⠋", 2.2 : "⠛", 2.3 : "⠻", 2.4 : "⢻",
3.0 : "⠇", 3.1 : "⠏", 3.2 : "⠟", 3.3 : "⠿", 3.4 : "⢿",
4.0 : "⡇", 4.1 : "⡏", 4.2 : "⡟", 4.3 : "⡿", 4.4 : "⣿"
}
graph_down_small = graph_down.copy()
graph_down_small[0.0] = "\033[1C"
meter: str = "■"
up: str = "↑"
down: str = "↓"
left: str = "←"
right: str = "→"
enter: str = "↲"
ok: str = f'{Color.fg("#30ff50")}√{Color.fg("#cc")}'
fail: str = f'{Color.fg("#ff3050")}!{Color.fg("#cc")}'
class Graph:
'''Class for creating and adding to graphs
* __str__ : returns graph as a string
* add(value: int) : adds a value to graph and returns it as a string
* __call__ : same as add
'''
out: str
width: int
height: int
graphs: Dict[bool, List[str]]
colors: List[str]
invert: bool
max_value: int
color_max_value: int
offset: int
no_zero: bool
round_up_low: bool
current: bool
last: int
lowest: int = 0
symbol: Dict[float, str]
def __init__(self, width: int, height: int, color: Union[List[str], Color, None], data: List[int], invert: bool = False, max_value: int = 0, offset: int = 0, color_max_value: Union[int, None] = None, no_zero: bool = False, round_up_low: bool = False):
self.graphs: Dict[bool, List[str]] = {False : [], True : []}
self.current: bool = True
self.width = width
self.height = height
self.invert = invert
self.offset = offset
self.round_up_low = round_up_low
self.no_zero = no_zero or round_up_low
if not data: data = [0]
if max_value:
self.lowest = 1 if self.round_up_low else 0
self.max_value = max_value
data = [ min_max((v + offset) * 100 // (max_value + offset), min_max(v + offset, 0, self.lowest), 100) for v in data ] #* Convert values to percentage values of max_value with max_value as ceiling
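#* Illustrative: with max_value=200 and offset=0 a raw value of 50 maps to
#* 50 * 100 // 200 = 25, i.e. 25% of the graph ceiling.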
else:
self.max_value = 0
if color_max_value:
self.color_max_value = color_max_value
else:
self.color_max_value = self.max_value
if self.color_max_value and self.max_value:
color_scale = int(100.0 * self.max_value / self.color_max_value)
else:
color_scale = 100
self.colors: List[str] = []
if isinstance(color, list) and height > 1:
for i in range(1, height + 1): self.colors.insert(0, color[min(100, i * color_scale // height)]) #* Calculate colors of graph
if invert: self.colors.reverse()
elif isinstance(color, Color) and height > 1:
self.colors = [ f'{color}' for _ in range(height) ]
else:
if isinstance(color, list): self.colors = color
elif isinstance(color, Color): self.colors = [ f'{color}' for _ in range(101) ]
if self.height == 1:
self.symbol = Symbol.graph_down_small if invert else Symbol.graph_up_small
else:
self.symbol = Symbol.graph_down if invert else Symbol.graph_up
value_width: int = ceil(len(data) / 2)
filler: str = ""
if value_width > width: #* If the size of given data set is bigger than the width of graph, shrink data set
data = data[-(width*2):]
value_width = ceil(len(data) / 2)
elif value_width < width: #* If the size of given data set is smaller than the width of graph, fill graph with whitespace
filler = self.symbol[0.0] * (width - value_width)
if len(data) % 2: data.insert(0, 0)
for _ in range(height):
for b in [True, False]:
self.graphs[b].append(filler)
self._create(data, new=True)
def _create(self, data: List[int], new: bool = False):
h_high: int
h_low: int
value: Dict[str, int] = { "left" : 0, "right" : 0 }
val: int
side: str
#* Create the graph
for h in range(self.height):
h_high = round(100 * (self.height - h) / self.height) if self.height > 1 else 100
h_low = round(100 * (self.height - (h + 1)) / self.height) if self.height > 1 else 0
for v in range(len(data)):
if new: self.current = bool(v % 2) #* Switch between True and False graphs
if new and v == 0: self.last = 0
for val, side in [self.last, "left"], [data[v], "right"]: # type: ignore
if val >= h_high:
value[side] = 4
elif val <= h_low:
value[side] = 0
else:
if self.height == 1: value[side] = round(val * 4 / 100 + 0.5)
else: value[side] = round((val - h_low) * 4 / (h_high - h_low) + 0.1)
if self.no_zero and not (new and v == 0 and side == "left") and h == self.height - 1 and value[side] < 1 and not (self.round_up_low and val == 0): value[side] = 1
if new: self.last = data[v]
self.graphs[self.current][h] += self.symbol[float(value["left"] + value["right"] / 10)]
if data: self.last = data[-1]
self.out = ""
if self.height == 1:
self.out += f'{"" if not self.colors else (THEME.inactive_fg if self.last < 5 else self.colors[self.last])}{self.graphs[self.current][0]}'
elif self.height > 1:
for h in range(self.height):
if h > 0: self.out += f'{Mv.d(1)}{Mv.l(self.width)}'
self.out += f'{"" if not self.colors else self.colors[h]}{self.graphs[self.current][h if not self.invert else (self.height - 1) - h]}'
if self.colors: self.out += f'{Term.fg}'
def __call__(self, value: Union[int, None] = None) -> str:
if not isinstance(value, int): return self.out
self.current = not self.current
if self.height == 1:
if self.graphs[self.current][0].startswith(self.symbol[0.0]):
self.graphs[self.current][0] = self.graphs[self.current][0].replace(self.symbol[0.0], "", 1)
else:
self.graphs[self.current][0] = self.graphs[self.current][0][1:]
else:
for n in range(self.height):
self.graphs[self.current][n] = self.graphs[self.current][n][1:]
if self.max_value: value = min_max((value + self.offset) * 100 // (self.max_value + self.offset), min_max(value + self.offset, 0, self.lowest), 100)
self._create([value])
return self.out
def add(self, value: Union[int, None] = None) -> str:
return self.__call__(value)
def __str__(self):
return self.out
def __repr__(self):
return repr(self.out)
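#* Usage sketch (hypothetical values; assumes THEME and Term are initialized):
#*   g = Graph(40, 8, THEME.gradient["cpu"], [0, 25, 50, 75, 100])
#*   out = str(g)     #* full graph as a string with cursor-movement escapes
#*   out = g.add(63)  #* scroll one column left and append a new value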
class Graphs:
'''Holds all graphs and lists of graphs for dynamically created graphs'''
cpu: Dict[str, Graph] = {}
cores: List[Graph] = [NotImplemented] * THREADS
temps: List[Graph] = [NotImplemented] * (THREADS + 1)
net: Dict[str, Graph] = {}
detailed_cpu: Graph = NotImplemented
detailed_mem: Graph = NotImplemented
pid_cpu: Dict[int, Graph] = {}
disk_io: Dict[str, Dict[str, Graph]] = {}
class Meter:
'''Creates a percentage meter
__init__(value, width, gradient_name, invert=False) to create new meter
__call__(value) to set value and return meter as a string
__str__ returns last set meter as a string
'''
out: str
color_gradient: List[str]
color_inactive: Color
gradient_name: str
width: int
invert: bool
saved: Dict[int, str]
def __init__(self, value: int, width: int, gradient_name: str, invert: bool = False):
self.gradient_name = gradient_name
self.color_gradient = THEME.gradient[gradient_name]
self.color_inactive = THEME.meter_bg
self.width = width
self.saved = {}
self.invert = invert
self.out = self._create(value)
def __call__(self, value: Union[int, None]) -> str:
if not isinstance(value, int): return self.out
if value > 100: value = 100
elif value < 0: value = 0
if value in self.saved:
self.out = self.saved[value]
else:
self.out = self._create(value)
return self.out
def __str__(self) -> str:
return self.out
def __repr__(self):
return repr(self.out)
def _create(self, value: int) -> str:
if value > 100: value = 100
elif value < 0: value = 0
out: str = ""
for i in range(1, self.width + 1):
if value >= round(i * 100 / self.width):
out += f'{self.color_gradient[round(i * 100 / self.width) if not self.invert else round(100 - (i * 100 / self.width))]}{Symbol.meter}'
else:
out += self.color_inactive(Symbol.meter * (self.width + 1 - i))
break
else:
out += f'{Term.fg}'
if not value in self.saved:
self.saved[value] = out
return out
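#* Usage sketch (hypothetical values; assumes THEME is initialized):
#*   m = Meter(50, 20, "cpu")
#*   out = str(m)  #* meter filled to 50%
#*   out = m(75)   #* redraw at 75%; results are cached per value in m.saved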
class Meters:
cpu: Meter
battery: Meter
mem: Dict[str, Union[Meter, Graph]] = {}
swap: Dict[str, Union[Meter, Graph]] = {}
disks_used: Dict[str, Meter] = {}
disks_free: Dict[str, Meter] = {}
class Box:
'''Box class with all needed attributes for create_box() function'''
name: str
num: int = 0
boxes: List = []
view_modes: Dict[str, List] = {"full" : ["cpu", "mem", "net", "proc"], "stat" : ["cpu", "mem", "net"], "proc" : ["cpu", "proc"]}
view_mode: str
for view_mode in view_modes:
if sorted(CONFIG.shown_boxes.split(), key=str.lower) == view_modes[view_mode]:
break
else:
view_mode = "user"
view_modes["user"] = CONFIG.shown_boxes.split()
height_p: int
width_p: int
x: int
y: int
width: int
height: int
out: str
bg: str
_b_cpu_h: int
_b_mem_h: int
redraw_all: bool
buffers: List[str] = []
clock_on: bool = False
clock: str = ""
clock_len: int = 0
resized: bool = False
clock_custom_format: Dict[str, Any] = {
"/host" : os.uname()[1],
"/user" : os.environ.get("USER") or pwd.getpwuid(os.getuid())[0],
"/uptime" : "",
}
if clock_custom_format["/host"].endswith(".local"):
clock_custom_format["/host"] = clock_custom_format["/host"].replace(".local", "")
@classmethod
def calc_sizes(cls):
'''Calculate sizes of boxes'''
cls.boxes = CONFIG.shown_boxes.split()
for sub in cls.__subclasses__():
sub._calc_size() # type: ignore
sub.resized = True # type: ignore
@classmethod
def draw_update_ms(cls, now: bool = True):
if not "cpu" in cls.boxes: return
update_string: str = f'{CONFIG.update_ms}ms'
xpos: int = CpuBox.x + CpuBox.width - len(update_string) - 15
if not "+" in Key.mouse:
Key.mouse["+"] = [[xpos + 7 + i, CpuBox.y] for i in range(3)]
Key.mouse["-"] = [[CpuBox.x + CpuBox.width - 4 + i, CpuBox.y] for i in range(3)]
Draw.buffer("update_ms!" if now and not Menu.active else "update_ms",
f'{Mv.to(CpuBox.y, xpos)}{THEME.cpu_box(Symbol.h_line * 7, Symbol.title_left)}{Fx.b}{THEME.hi_fg("+")} ',
f'{THEME.title(update_string)} {THEME.hi_fg("-")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}', only_save=Menu.active, once=True)
if now and not Menu.active:
Draw.clear("update_ms")
if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
Draw.out("battery")
@classmethod
def draw_clock(cls, force: bool = False):
if not "cpu" in cls.boxes or not cls.clock_on: return
out: str = ""
if force: pass
elif Term.resized or strftime(CONFIG.draw_clock) == cls.clock: return
clock_string = cls.clock = strftime(CONFIG.draw_clock)
for custom in cls.clock_custom_format:
if custom in clock_string:
if custom == "/uptime": cls.clock_custom_format["/uptime"] = CpuCollector.uptime
clock_string = clock_string.replace(custom, cls.clock_custom_format[custom])
clock_len = len(clock_string[:(CpuBox.width-56)])
if cls.clock_len != clock_len and not CpuBox.resized:
out = f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(cls.clock_len//2))}{Fx.ub}{THEME.cpu_box}{Symbol.h_line * cls.clock_len}'
cls.clock_len = clock_len
now: bool = False if Menu.active else not force
out += (f'{Mv.to(CpuBox.y, ((CpuBox.width)//2)-(clock_len//2))}{Fx.ub}{THEME.cpu_box}'
f'{Symbol.title_left}{Fx.b}{THEME.title(clock_string[:clock_len])}{Fx.ub}{THEME.cpu_box}{Symbol.title_right}{Term.fg}')
Draw.buffer("clock", out, z=1, now=now, once=not force, only_save=Menu.active)
if now and not Menu.active:
if CONFIG.show_battery and hasattr(psutil, "sensors_battery") and psutil.sensors_battery():
Draw.out("battery")
@classmethod
def empty_bg(cls) -> str:
return (f'{Term.clear}' +
(f'{Banner.draw(Term.height // 2 - 10, center=True)}'
f'{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}[esc] Menu'
f'{Mv.r(25)}{Fx.i}Version: {VERSION}{Fx.ui}' if Term.height > 22 else "") +
f'{Mv.d(1)}{Mv.l(34)}{Fx.b}All boxes hidden!'
f'{Mv.d(1)}{Mv.l(17)}{Fx.b}[1] {Fx.ub}Toggle CPU box'
f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[2] {Fx.ub}Toggle MEM box'
f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[3] {Fx.ub}Toggle NET box'
f'{Mv.d(1)}{Mv.l(18)}{Fx.b}[4] {Fx.ub}Toggle PROC box'
f'{Mv.d(1)}{Mv.l(19)}{Fx.b}[m] {Fx.ub}Cycle presets'
f'{Mv.d(1)}{Mv.l(17)}{Fx.b}[q] Quit {Fx.ub}{Term.bg}{Term.fg}')
@classmethod
def draw_bg(cls, now: bool = True):
'''Draw all boxes outlines and titles'''
out: str = ""
if not cls.boxes:
out = cls.empty_bg()
else:
out = "".join(sub._draw_bg() for sub in cls.__subclasses__()) # type: ignore
Draw.buffer("bg", out, now=now, z=1000, only_save=Menu.active, once=True)
cls.draw_update_ms(now=now)
if CONFIG.draw_clock: cls.draw_clock(force=True)
class SubBox:
box_x: int = 0
box_y: int = 0
box_width: int = 0
box_height: int = 0
box_columns: int = 0
column_size: int = 0
class CpuBox(Box, SubBox):
name = "cpu"
num = 1
x = 1
y = 1
height_p = 32
width_p = 100
min_w: int = 60
min_h: int = 8
resized: bool = True
redraw: bool = False
buffer: str = "cpu"
battery_percent: int = 1000
battery_secs: int = 0
battery_status: str = "Unknown"
old_battery_pos = 0
old_battery_len = 0
battery_path: Union[str, None] = ""
battery_clear: bool = False
battery_symbols: Dict[str, str] = {"Charging": "▲",
"Discharging": "▼",
"Full": "■",
"Not charging": "■"}
clock_block: bool = True
Box.buffers.append(buffer)
@classmethod
def _calc_size(cls):
if not "cpu" in cls.boxes:
Box._b_cpu_h = 0
cls.width = Term.width
return
cpu = CpuCollector
height_p: int
if cls.boxes == ["cpu"]:
height_p = 100
else:
height_p = cls.height_p
cls.width = round(Term.width * cls.width_p / 100)
cls.height = round(Term.height * height_p / 100)
if cls.height < 8: cls.height = 8
Box._b_cpu_h = cls.height
cls.box_columns = ceil((THREADS + 1) / (cls.height - 5))
if cls.box_columns * (20 + 13 if cpu.got_sensors else 21) < cls.width - (cls.width // 3):
cls.column_size = 2
cls.box_width = (20 + 13 if cpu.got_sensors else 21) * cls.box_columns - ((cls.box_columns - 1) * 1)
elif cls.box_columns * (15 + 6 if cpu.got_sensors else 15) < cls.width - (cls.width // 3):
cls.column_size = 1
cls.box_width = (15 + 6 if cpu.got_sensors else 15) * cls.box_columns - ((cls.box_columns - 1) * 1)
elif cls.box_columns * (8 + 6 if cpu.got_sensors else 8) < cls.width - (cls.width // 3):
cls.column_size = 0
else:
cls.box_columns = (cls.width - cls.width // 3) // (8 + 6 if cpu.got_sensors else 8); cls.column_size = 0
if cls.column_size == 0: cls.box_width = (8 + 6 if cpu.got_sensors else 8) * cls.box_columns + 1
cls.box_height = ceil(THREADS / cls.box_columns) + 4
if cls.box_height > cls.height - 2: cls.box_height = cls.height - 2
cls.box_x = (cls.width - 1) - cls.box_width
cls.box_y = cls.y + ceil((cls.height - 2) / 2) - ceil(cls.box_height / 2) + 1
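#* Note on the width terms above: "20 + 13 if cpu.got_sensors else 21" parses
#* as "(20 + 13) if ... else 21", giving per-column widths of 33/21 (size 2),
#* 21/15 (size 1) and 14/8 (size 0) with and without temperature sensors.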
@classmethod
def _draw_bg(cls) -> str:
if not "cpu" in cls.boxes: return ""
if not "M" in Key.mouse:
Key.mouse["M"] = [[cls.x + 10 + i, cls.y] for i in range(6)]
return (f'{create_box(box=cls, line_color=THEME.cpu_box)}'
f'{Mv.to(cls.y, cls.x + 10)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("M")}{THEME.title("enu")}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
f'{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title=CPU_NAME[:cls.box_width - 14] if not CONFIG.custom_cpu_name else CONFIG.custom_cpu_name[:cls.box_width - 14])}')
@classmethod
def battery_activity(cls) -> bool:
if not hasattr(psutil, "sensors_battery") or psutil.sensors_battery() is None:
if cls.battery_percent != 1000:
cls.battery_clear = True
return False
if cls.battery_path == "":
cls.battery_path = None
if os.path.isdir("/sys/class/power_supply"):
for directory in sorted(os.listdir("/sys/class/power_supply")):
if directory.startswith('BAT') or 'battery' in directory.lower():
cls.battery_path = f'/sys/class/power_supply/{directory}/'
break
return_true: bool = False
percent: int = ceil(getattr(psutil.sensors_battery(), "percent", 0))
if percent != cls.battery_percent:
cls.battery_percent = percent
return_true = True
seconds: int = getattr(psutil.sensors_battery(), "secsleft", 0)
if seconds != cls.battery_secs:
cls.battery_secs = seconds
return_true = True
status: str = "not_set"
if cls.battery_path:
status = readfile(cls.battery_path + "status", default="not_set")
if status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == True:
status = "Charging" if cls.battery_percent < 100 else "Full"
elif status == "not_set" and getattr(psutil.sensors_battery(), "power_plugged", None) == False:
status = "Discharging"
elif status == "not_set":
status = "Unknown"
if status != cls.battery_status:
cls.battery_status = status
return_true = True
return return_true or cls.resized or cls.redraw or Menu.active
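#* Returns True when battery percent, time left or charging status changed
#* since the last poll (or a resize/redraw is pending), so the battery widget
#* is only repainted when there is something new to show.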
@classmethod
def _draw_fg(cls):
if not "cpu" in cls.boxes: return
cpu = CpuCollector
if cpu.redraw: cls.redraw = True
out: str = ""
out_misc: str = ""
lavg: str = ""
x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
hh: int = ceil(h / 2)
hh2: int = h - hh
mid_line: bool = False
temp: int = 0
unit: str = ""
if not CONFIG.cpu_single_graph and CONFIG.cpu_graph_upper != CONFIG.cpu_graph_lower:
mid_line = True
if h % 2: hh = floor(h / 2)
else: hh2 -= 1
hide_cores: bool = (cpu.cpu_temp_only or not CONFIG.show_coretemp) and cpu.got_sensors
ct_width: int = (max(6, 6 * cls.column_size)) * hide_cores
if cls.resized or cls.redraw:
if not "m" in Key.mouse:
Key.mouse["m"] = [[cls.x + 16 + i, cls.y] for i in range(12)]
out_misc += f'{Mv.to(cls.y, cls.x + 16)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("m")}{THEME.title}ode:{Box.view_mode}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}'
Graphs.cpu["up"] = Graph(w - bw - 3, (h if CONFIG.cpu_single_graph else hh), THEME.gradient["cpu"], cpu.cpu_upper, round_up_low=True)
if not CONFIG.cpu_single_graph:
Graphs.cpu["down"] = Graph(w - bw - 3, hh2, THEME.gradient["cpu"], cpu.cpu_lower, invert=CONFIG.cpu_invert_lower, round_up_low=True)
Meters.cpu = Meter(cpu.cpu_usage[0][-1], bw - (21 if cpu.got_sensors else 9), "cpu")
if cls.column_size > 0 or ct_width > 0:
for n in range(THREADS):
Graphs.cores[n] = Graph(5 * cls.column_size + ct_width, 1, None, cpu.cpu_usage[n + 1])
if cpu.got_sensors:
Graphs.temps[0] = Graph(5, 1, None, cpu.cpu_temp[0], max_value=cpu.cpu_temp_crit, offset=-23)
if cls.column_size > 1:
for n in range(1, THREADS + 1):
if not cpu.cpu_temp[n]:
continue
Graphs.temps[n] = Graph(5, 1, None, cpu.cpu_temp[n], max_value=cpu.cpu_temp_crit, offset=-23)
Draw.buffer("cpu_misc", out_misc, only_save=True)
if CONFIG.show_battery and cls.battery_activity():
bat_out: str = ""
if cls.battery_secs > 0:
battery_time: str = f' {cls.battery_secs // 3600:02}:{(cls.battery_secs % 3600) // 60:02}'
else:
battery_time = ""
if not hasattr(Meters, "battery") or cls.resized:
Meters.battery = Meter(cls.battery_percent, 10, "cpu", invert=True)
battery_symbol: str = cls.battery_symbols.get(cls.battery_status, "○")
battery_len: int = len(f'{CONFIG.update_ms}') + (11 if cls.width >= 100 else 0) + len(battery_time) + len(f'{cls.battery_percent}')
battery_pos = cls.width - battery_len - 17
if (battery_pos != cls.old_battery_pos or battery_len != cls.old_battery_len) and cls.old_battery_pos > 0 and not cls.resized:
bat_out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
cls.old_battery_pos, cls.old_battery_len = battery_pos, battery_len
bat_out += (f'{Mv.to(y-1, battery_pos)}{THEME.cpu_box(Symbol.title_left)}{Fx.b}{THEME.title}BAT{battery_symbol} {cls.battery_percent}%'+
("" if cls.width < 100 else f' {Fx.ub}{Meters.battery(cls.battery_percent)}{Fx.b}') +
f'{THEME.title}{battery_time}{Fx.ub}{THEME.cpu_box(Symbol.title_right)}')
Draw.buffer("battery", f'{bat_out}{Term.fg}', only_save=Menu.active)
elif cls.battery_clear:
out += f'{Mv.to(y-1, cls.old_battery_pos)}{THEME.cpu_box(Symbol.h_line*(cls.old_battery_len+4))}'
cls.battery_clear = False
cls.battery_percent = 1000
cls.battery_secs = 0
cls.battery_status = "Unknown"
cls.old_battery_pos = 0
cls.old_battery_len = 0
cls.battery_path = ""
Draw.clear("battery", saved=True)
cx = cy = cc = 0
ccw = (bw + 1) // cls.box_columns
if cpu.cpu_freq:
freq: str = f'{cpu.cpu_freq} MHz' if cpu.cpu_freq < 1000 else f'{float(cpu.cpu_freq / 1000):.1f} GHz'
out += f'{Mv.to(by - 1, bx + bw - 9)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title(freq)}{Fx.ub}{THEME.div_line(Symbol.title_right)}'
out += f'{Mv.to(y, x)}{Graphs.cpu["up"](None if cls.resized else cpu.cpu_upper[-1])}'
if mid_line:
out += (f'{Mv.to(y+hh, x-1)}{THEME.cpu_box(Symbol.title_right)}{THEME.div_line}{Symbol.h_line * (w - bw - 3)}{THEME.div_line(Symbol.title_left)}'
f'{Mv.to(y+hh, x+((w-bw)//2)-((len(CONFIG.cpu_graph_upper)+len(CONFIG.cpu_graph_lower))//2)-4)}{THEME.main_fg}{CONFIG.cpu_graph_upper}{Mv.r(1)}▲▼{Mv.r(1)}{CONFIG.cpu_graph_lower}')
if not CONFIG.cpu_single_graph and Graphs.cpu.get("down"):
out += f'{Mv.to(y + hh + (1 * mid_line), x)}{Graphs.cpu["down"](None if cls.resized else cpu.cpu_lower[-1])}'
out += (f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b}{"CPU "}{Fx.ub}{Meters.cpu(cpu.cpu_usage[0][-1])}'
f'{THEME.gradient["cpu"][cpu.cpu_usage[0][-1]]}{cpu.cpu_usage[0][-1]:>4}{THEME.main_fg}%')
if cpu.got_sensors:
try:
temp, unit = temperature(cpu.cpu_temp[0][-1], CONFIG.temp_scale)
out += (f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][min_max(cpu.cpu_temp[0][-1], 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}{Graphs.temps[0](None if cls.resized else cpu.cpu_temp[0][-1])}'
f'{temp:>4}{THEME.main_fg}{unit}')
except Exception:
cpu.got_sensors = False
cy += 1
for n in range(1, THREADS + 1):
out += f'{THEME.main_fg}{Mv.to(by + cy, bx + cx)}{Fx.b + "C" + Fx.ub if THREADS < 100 else ""}{str(n):<{2 if cls.column_size == 0 else 3}}'
if cls.column_size > 0 or ct_width > 0:
out += f'{THEME.inactive_fg}{"⡀" * (5 * cls.column_size + ct_width)}{Mv.l(5 * cls.column_size + ct_width)}{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}{Graphs.cores[n-1](None if cls.resized else cpu.cpu_usage[n][-1])}'
else:
out += f'{THEME.gradient["cpu"][cpu.cpu_usage[n][-1]]}'
out += f'{cpu.cpu_usage[n][-1]:>{3 if cls.column_size < 2 else 4}}{THEME.main_fg}%'
if cpu.got_sensors and cpu.cpu_temp[n] and not hide_cores:
try:
temp, unit = temperature(cpu.cpu_temp[n][-1], CONFIG.temp_scale)
if cls.column_size > 1:
out += f'{THEME.inactive_fg} ⡀⡀⡀⡀⡀{Mv.l(5)}{THEME.gradient["temp"][min_max(cpu.cpu_temp[n][-1], 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}{Graphs.temps[n](None if cls.resized else cpu.cpu_temp[n][-1])}'
else:
out += f'{THEME.gradient["temp"][min_max(temp, 0, cpu.cpu_temp_crit) * 100 // cpu.cpu_temp_crit]}'
out += f'{temp:>4}{THEME.main_fg}{unit}'
except Exception:
cpu.got_sensors = False
elif cpu.got_sensors and not hide_cores:
out += f'{Mv.r(max(6, 6 * cls.column_size))}'
out += f'{THEME.div_line(Symbol.v_line)}'
cy += 1
if cy > ceil(THREADS/cls.box_columns) and n != THREADS:
cc += 1; cy = 1; cx = ccw * cc
if cc == cls.box_columns: break
if cy < bh - 1: cy = bh - 1
if cy < bh and cc < cls.box_columns:
if cls.column_size == 2 and cpu.got_sensors:
lavg = f' Load AVG: {" ".join(str(l) for l in cpu.load_avg):^19.19}'
elif cls.column_size == 2 or (cls.column_size == 1 and cpu.got_sensors):
lavg = f'LAV: {" ".join(str(l) for l in cpu.load_avg):^14.14}'
elif cls.column_size == 1 or (cls.column_size == 0 and cpu.got_sensors):
lavg = f'L {" ".join(str(round(l, 1)) for l in cpu.load_avg):^11.11}'
else:
lavg = f'{" ".join(str(round(l, 1)) for l in cpu.load_avg[:2]):^7.7}'
out += f'{Mv.to(by + cy, bx + cx)}{THEME.main_fg}{lavg}{THEME.div_line(Symbol.v_line)}'
if CONFIG.show_uptime:
out += f'{Mv.to(y + (0 if not CONFIG.cpu_invert_lower or CONFIG.cpu_single_graph else h - 1), x + 1)}{THEME.graph_text}{Fx.trans("up " + cpu.uptime)}'
Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
cls.resized = cls.redraw = cls.clock_block = False
class MemBox(Box):
name = "mem"
num = 2
height_p = 38
width_p = 45
min_w: int = 36
min_h: int = 10
x = 1
y = 1
mem_meter: int = 0
mem_size: int = 0
disk_meter: int = 0
divider: int = 0
mem_width: int = 0
disks_width: int = 0
disks_io_h: int = 0
disks_io_order: List[str] = []
graph_speeds: Dict[str, int] = {}
graph_height: int
resized: bool = True
redraw: bool = False
buffer: str = "mem"
swap_on: bool = CONFIG.show_swap
Box.buffers.append(buffer)
mem_names: List[str] = ["used", "available", "cached", "free"]
swap_names: List[str] = ["used", "free"]
@classmethod
def _calc_size(cls):
if not "mem" in cls.boxes:
Box._b_mem_h = 0
cls.width = Term.width
return
width_p: int; height_p: int
if not "proc" in cls.boxes:
width_p = 100
else:
width_p = cls.width_p
if not "cpu" in cls.boxes:
height_p = 60 if "net" in cls.boxes else 98
elif not "net" in cls.boxes:
height_p = 98 - CpuBox.height_p
else:
height_p = cls.height_p
cls.width = round(Term.width * width_p / 100)
cls.height = round(Term.height * height_p / 100) + 1
if cls.height + Box._b_cpu_h > Term.height: cls.height = Term.height - Box._b_cpu_h
Box._b_mem_h = cls.height
cls.y = Box._b_cpu_h + 1
if CONFIG.show_disks:
cls.mem_width = ceil((cls.width - 3) / 2)
cls.disks_width = cls.width - cls.mem_width - 3
if cls.mem_width + cls.disks_width < cls.width - 2: cls.mem_width += 1
cls.divider = cls.x + cls.mem_width
else:
cls.mem_width = cls.width - 1
item_height: int = 6 if cls.swap_on and not CONFIG.swap_disk else 4
if cls.height - (3 if cls.swap_on and not CONFIG.swap_disk else 2) > 2 * item_height: cls.mem_size = 3
elif cls.mem_width > 25: cls.mem_size = 2
else: cls.mem_size = 1
cls.mem_meter = cls.width - (cls.disks_width if CONFIG.show_disks else 0) - (9 if cls.mem_size > 2 else 20)
if cls.mem_size == 1: cls.mem_meter += 6
if cls.mem_meter < 1: cls.mem_meter = 0
if CONFIG.mem_graphs:
cls.graph_height = round(((cls.height - (2 if cls.swap_on and not CONFIG.swap_disk else 1)) - (2 if cls.mem_size == 3 else 1) * item_height) / item_height)
if cls.graph_height == 0: cls.graph_height = 1
if cls.graph_height > 1: cls.mem_meter += 6
else:
cls.graph_height = 0
if CONFIG.show_disks:
cls.disk_meter = cls.width - cls.mem_width - 23
if cls.disks_width < 25:
cls.disk_meter += 10
if cls.disk_meter < 1: cls.disk_meter = 0
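#* mem_size selects layout density: 3 = two rows per entry (value above a full
#* meter), 2 = one row with full labels, 1 = compact one-letter labels; the
#* meter widths above are then adjusted to fit the remaining columns.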
@classmethod
def _draw_bg(cls) -> str:
if not "mem" in cls.boxes: return ""
out: str = ""
out += f'{create_box(box=cls, line_color=THEME.mem_box)}'
if CONFIG.show_disks:
out += (f'{Mv.to(cls.y, cls.divider + 2)}{THEME.mem_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("d")}{THEME.title("isks")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}'
f'{Mv.to(cls.y, cls.divider)}{THEME.mem_box(Symbol.div_up)}'
f'{Mv.to(cls.y + cls.height - 1, cls.divider)}{THEME.mem_box(Symbol.div_down)}{THEME.div_line}'
f'{"".join(f"{Mv.to(cls.y + i, cls.divider)}{Symbol.v_line}" for i in range(1, cls.height - 1))}')
Key.mouse["d"] = [[cls.divider + 3 + i, cls.y] for i in range(5)]
else:
out += f'{Mv.to(cls.y, cls.x + cls.width - 9)}{THEME.mem_box(Symbol.title_left)}{THEME.hi_fg("d")}{THEME.title("isks")}{THEME.mem_box(Symbol.title_right)}'
Key.mouse["d"] = [[cls.x + cls.width - 8 + i, cls.y] for i in range(5)]
return out
@classmethod
def _draw_fg(cls):
if not "mem" in cls.boxes: return
mem = MemCollector
if mem.redraw: cls.redraw = True
out: str = ""
out_misc: str = ""
gbg: str = ""
gmv: str = ""
gli: str = ""
x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
if cls.resized or cls.redraw:
cls.redraw = True
cls._calc_size()
out_misc += cls._draw_bg()
Meters.mem = {}
Meters.swap = {}
Meters.disks_used = {}
Meters.disks_free = {}
if cls.mem_meter > 0:
for name in cls.mem_names:
if CONFIG.mem_graphs:
Meters.mem[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.vlist[name])
else:
Meters.mem[name] = Meter(mem.percent[name], cls.mem_meter, name)
if cls.swap_on:
for name in cls.swap_names:
if CONFIG.swap_disk and CONFIG.show_disks:
break
elif CONFIG.mem_graphs and not CONFIG.swap_disk:
Meters.swap[name] = Graph(cls.mem_meter, cls.graph_height, THEME.gradient[name], mem.swap_vlist[name])
else:
Meters.swap[name] = Meter(mem.swap_percent[name], cls.mem_meter, name)
if CONFIG.show_disks and mem.disks:
if CONFIG.show_io_stat or CONFIG.io_mode:
d_graph: List[str] = []
d_no_graph: List[str] = []
l_vals: List[Tuple[str, int, str, bool]] = []
if CONFIG.io_mode:
cls.disks_io_h = (cls.height - 2 - len(mem.disks)) // max(1, len(mem.disks_io_dict))
if cls.disks_io_h < 2: cls.disks_io_h = 1 if CONFIG.io_graph_combined else 2
else:
cls.disks_io_h = 1
if CONFIG.io_graph_speeds and not cls.graph_speeds:
try:
cls.graph_speeds = { spds.split(":")[0] : int(spds.split(":")[1]) for spds in list(i.strip() for i in CONFIG.io_graph_speeds.split(","))}
except (KeyError, ValueError):
errlog.error("Wrong formatting in io_graph_speeds variable. Using defaults.")
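#* Illustrative config value (hypothetical disk names):
#*   io_graph_speeds="sda:100, nvme0n1:3000"
#* maps a disk name to a fixed top value for its IO graph; disks without an
#* entry fall back to the default of 10 via .get(name, 10) below.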
for name in mem.disks.keys():
if name in mem.disks_io_dict:
d_graph.append(name)
else:
d_no_graph.append(name)
continue
if CONFIG.io_graph_combined or not CONFIG.io_mode:
l_vals = [("rw", cls.disks_io_h, "available", False)]
else:
l_vals = [("read", cls.disks_io_h // 2, "free", False), ("write", cls.disks_io_h // 2, "used", True)]
Graphs.disk_io[name] = {_name : Graph(width=cls.disks_width - (6 if not CONFIG.io_mode else 0), height=_height, color=THEME.gradient[_gradient],
data=mem.disks_io_dict[name][_name], invert=_invert, max_value=cls.graph_speeds.get(name, 10), no_zero=True)
for _name, _height, _gradient, _invert in l_vals}
cls.disks_io_order = d_graph + d_no_graph
if cls.disk_meter > 0:
for n, name in enumerate(mem.disks.keys()):
if n * 2 > h: break
Meters.disks_used[name] = Meter(mem.disks[name]["used_percent"], cls.disk_meter, "used")
if len(mem.disks) * 3 <= h + 1:
Meters.disks_free[name] = Meter(mem.disks[name]["free_percent"], cls.disk_meter, "free")
if not "g" in Key.mouse:
Key.mouse["g"] = [[x + 8 + i, y-1] for i in range(5)]
out_misc += (f'{Mv.to(y-1, x + 7)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.mem_graphs else ""}'
f'{THEME.hi_fg("g")}{THEME.title("raph")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
if CONFIG.show_disks:
if not "s" in Key.mouse:
Key.mouse["s"] = [[x + w - 6 + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, x + w - 7)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.swap_disk else ""}'
f'{THEME.hi_fg("s")}{THEME.title("wap")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
if not "i" in Key.mouse:
Key.mouse["i"] = [[x + w - 10 + i, y-1] for i in range(2)]
out_misc += (f'{Mv.to(y-1, x + w - 11)}{THEME.mem_box(Symbol.title_left)}{Fx.b if CONFIG.io_mode else ""}'
f'{THEME.hi_fg("i")}{THEME.title("o")}{Fx.ub}{THEME.mem_box(Symbol.title_right)}')
if Collector.collect_interrupt: return
Draw.buffer("mem_misc", out_misc, only_save=True)
try:
#* Mem
cx = 1; cy = 1
out += f'{Mv.to(y, x+1)}{THEME.title}{Fx.b}Total:{mem.string["total"]:>{cls.mem_width - 9}}{Fx.ub}{THEME.main_fg}'
if cls.graph_height > 0:
gli = f'{Mv.l(2)}{THEME.mem_box(Symbol.title_right)}{THEME.div_line}{Symbol.h_line * (cls.mem_width - 1)}{"" if CONFIG.show_disks else THEME.mem_box}{Symbol.title_left}{Mv.l(cls.mem_width - 1)}{THEME.title}'
if cls.graph_height >= 2:
gbg = f'{Mv.l(1)}'
gmv = f'{Mv.l(cls.mem_width - 2)}{Mv.u(cls.graph_height - 1)}'
big_mem: bool = cls.mem_width > 21
for name in cls.mem_names:
if cy > h - 1: break
if Collector.collect_interrupt: return
if cls.mem_size > 2:
out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.string[name])))}{Fx.trans(mem.string[name])}'
f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{gmv}{str(mem.percent[name])+"%":>4}')
cy += 2 if not cls.graph_height else cls.graph_height + 1
else:
out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.mem[name](None if cls.resized else mem.percent[name])}{mem.string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'
cy += 1 if not cls.graph_height else cls.graph_height
#* Swap
if cls.swap_on and CONFIG.show_swap and not CONFIG.swap_disk and mem.swap_string:
if h - cy > 5:
if cls.graph_height > 0: out += f'{Mv.to(y+cy, x+cx)}{gli}'
cy += 1
out += f'{Mv.to(y+cy, x+cx)}{THEME.title}{Fx.b}Swap:{mem.swap_string["total"]:>{cls.mem_width - 8}}{Fx.ub}{THEME.main_fg}'
cy += 1
for name in cls.swap_names:
if cy > h - 1: break
if Collector.collect_interrupt: return
if cls.mem_size > 2:
out += (f'{Mv.to(y+cy, x+cx)}{gli}{name.capitalize()[:None if big_mem else 5]+":":<{1 if big_mem else 6.6}}{Mv.to(y+cy, x+cx + cls.mem_width - 3 - (len(mem.swap_string[name])))}{Fx.trans(mem.swap_string[name])}'
f'{Mv.to(y+cy+1, x+cx)}{gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{gmv}{str(mem.swap_percent[name])+"%":>4}')
cy += 2 if not cls.graph_height else cls.graph_height + 1
else:
out += f'{Mv.to(y+cy, x+cx)}{name.capitalize():{5.5 if cls.mem_size > 1 else 1.1}} {gbg}{Meters.swap[name](None if cls.resized else mem.swap_percent[name])}{mem.swap_string[name][:None if cls.mem_size > 1 else -2]:>{9 if cls.mem_size > 1 else 7}}'; cy += 1 if not cls.graph_height else cls.graph_height
if cls.graph_height > 0 and not cy == h: out += f'{Mv.to(y+cy, x+cx)}{gli}'
#* Disks
if CONFIG.show_disks and mem.disks:
cx = x + cls.mem_width - 1; cy = 0
big_disk: bool = cls.disks_width >= 25
gli = f'{Mv.l(2)}{THEME.div_line}{Symbol.title_right}{Symbol.h_line * cls.disks_width}{THEME.mem_box}{Symbol.title_left}{Mv.l(cls.disks_width - 1)}'
if CONFIG.io_mode:
for name in cls.disks_io_order:
item = mem.disks[name]
io_item = mem.disks_io_dict.get(name, {})
if Collector.collect_interrupt: return
if cy > h - 1: break
out += Fx.trans(f'{Mv.to(y+cy, x+cx)}{gli}{THEME.title}{Fx.b}{item["name"]:{cls.disks_width - 2}.12}{Mv.to(y+cy, x + cx + cls.disks_width - 11)}{item["total"][:None if big_disk else -2]:>9}')
if big_disk:
out += Fx.trans(f'{Mv.to(y+cy, x + cx + (cls.disks_width // 2) - (len(str(item["used_percent"])) // 2) - 2)}{Fx.ub}{THEME.main_fg}{item["used_percent"]}%')
cy += 1
if io_item:
if cy > h - 1: break
if CONFIG.io_graph_combined:
if cls.disks_io_h <= 1:
out += f'{Mv.to(y+cy, x+cx-1)}{" " * 5}'
out += (f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{Graphs.disk_io[name]["rw"](None if cls.redraw else mem.disks_io_dict[name]["rw"][-1])}'
f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{item["io"] or "RW"}')
cy += cls.disks_io_h
else:
if cls.disks_io_h <= 3:
out += f'{Mv.to(y+cy, x+cx-1)}{" " * 5}{Mv.to(y+cy+1, x+cx-1)}{" " * 5}'
out += (f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{Graphs.disk_io[name]["read"](None if cls.redraw else mem.disks_io_dict[name]["read"][-1])}'
f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{item["io_r"] or "R"}')
cy += cls.disks_io_h // 2
out += f'{Mv.to(y+cy, x+cx-1)}{Graphs.disk_io[name]["write"](None if cls.redraw else mem.disks_io_dict[name]["write"][-1])}'
cy += cls.disks_io_h // 2
out += f'{Mv.to(y+cy-1, x+cx-1)}{THEME.main_fg}{item["io_w"] or "W"}'
else:
for name, item in mem.disks.items():
if Collector.collect_interrupt: return
if not name in Meters.disks_used:
continue
if cy > h - 1: break
out += Fx.trans(f'{Mv.to(y+cy, x+cx)}{gli}{THEME.title}{Fx.b}{item["name"]:{cls.disks_width - 2}.12}{Mv.to(y+cy, x + cx + cls.disks_width - 11)}{item["total"][:None if big_disk else -2]:>9}')
if big_disk:
out += f'{Mv.to(y+cy, x + cx + (cls.disks_width // 2) - (len(item["io"]) // 2) - 2)}{Fx.ub}{THEME.main_fg}{Fx.trans(item["io"])}'
cy += 1
if cy > h - 1: break
if CONFIG.show_io_stat and name in Graphs.disk_io:
out += f'{Mv.to(y+cy, x+cx-1)}{THEME.main_fg}{Fx.ub}{" IO: " if big_disk else " IO " + Mv.l(2)}{Fx.ub}{Graphs.disk_io[name]["rw"](None if cls.redraw else mem.disks_io_dict[name]["rw"][-1])}'
if not big_disk and item["io"]:
out += f'{Mv.to(y+cy, x+cx-1)}{Fx.ub}{THEME.main_fg}{item["io"]}'
cy += 1
if cy > h - 1: break
out += Mv.to(y+cy, x+cx) + (f'Used:{str(item["used_percent"]) + "%":>4} ' if big_disk else "U ")
out += f'{Meters.disks_used[name](None if cls.resized else mem.disks[name]["used_percent"])}{item["used"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
cy += 1
if len(mem.disks) * 3 + (len(mem.disks_io_dict) if CONFIG.show_io_stat else 0) <= h + 1:
if cy > h - 1: break
out += Mv.to(y+cy, x+cx)
out += f'Free:{str(item["free_percent"]) + "%":>4} ' if big_disk else f'{"F "}'
out += f'{Meters.disks_free[name](None if cls.resized else mem.disks[name]["free_percent"])}{item["free"][:None if big_disk else -2]:>{9 if big_disk else 7}}'
cy += 1
if len(mem.disks) * 4 + (len(mem.disks_io_dict) if CONFIG.show_io_stat else 0) <= h + 1: cy += 1
except (KeyError, TypeError):
return
Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
cls.resized = cls.redraw = False
class NetBox(Box, SubBox):
name = "net"
num = 3
height_p = 30
width_p = 45
min_w: int = 36
min_h: int = 6
x = 1
y = 1
resized: bool = True
redraw: bool = True
graph_height: Dict[str, int] = {}
symbols: Dict[str, str] = {"download" : "▼", "upload" : "▲"}
buffer: str = "net"
Box.buffers.append(buffer)
@classmethod
def _calc_size(cls):
if not "net" in cls.boxes:
cls.width = Term.width
return
if not "proc" in cls.boxes:
width_p = 100
else:
width_p = cls.width_p
cls.width = round(Term.width * width_p / 100)
cls.height = Term.height - Box._b_cpu_h - Box._b_mem_h
cls.y = Term.height - cls.height + 1
cls.box_width = 27 if cls.width > 45 else 19
cls.box_height = 9 if cls.height > 10 else cls.height - 2
cls.box_x = cls.width - cls.box_width - 1
cls.box_y = cls.y + ((cls.height - 2) // 2) - cls.box_height // 2 + 1
cls.graph_height["download"] = round((cls.height - 2) / 2)
cls.graph_height["upload"] = cls.height - 2 - cls.graph_height["download"]
cls.redraw = True
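#* The plot area is split in two: the top half (rounded) holds the download
#* graph and the remainder the upload graph, which is later drawn inverted.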
@classmethod
def _draw_bg(cls) -> str:
if not "net" in cls.boxes: return ""
return (f'{create_box(box=cls, line_color=THEME.net_box)}'
f'{create_box(x=cls.box_x, y=cls.box_y, width=cls.box_width, height=cls.box_height, line_color=THEME.div_line, fill=False, title="Download", title2="Upload")}')
@classmethod
def _draw_fg(cls):
if not "net" in cls.boxes: return
net = NetCollector
if net.redraw: cls.redraw = True
if not net.nic: return
out: str = ""
out_misc: str = ""
x, y, w, h = cls.x + 1, cls.y + 1, cls.width - 2, cls.height - 2
bx, by, bw, bh = cls.box_x + 1, cls.box_y + 1, cls.box_width - 2, cls.box_height - 2
reset: bool = bool(net.stats[net.nic]["download"]["offset"])
if cls.resized or cls.redraw:
out_misc += cls._draw_bg()
if not "b" in Key.mouse:
Key.mouse["b"] = [[x+w - len(net.nic[:10]) - 9 + i, y-1] for i in range(4)]
Key.mouse["n"] = [[x+w - 5 + i, y-1] for i in range(4)]
Key.mouse["z"] = [[x+w - len(net.nic[:10]) - 14 + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, x+w - 25)}{THEME.net_box}{Symbol.h_line * (10 - len(net.nic[:10]))}{Symbol.title_left}{Fx.b if reset else ""}{THEME.hi_fg("z")}{THEME.title("ero")}'
f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}'
f'{THEME.net_box}{Symbol.title_left}{Fx.b}{THEME.hi_fg("<b")} {THEME.title(net.nic[:10])} {THEME.hi_fg("n>")}{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
if w - len(net.nic[:10]) - 20 > 6:
if not "a" in Key.mouse: Key.mouse["a"] = [[x+w - 20 - len(net.nic[:10]) + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, x+w - 21 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if net.auto_min else ""}{THEME.hi_fg("a")}{THEME.title("uto")}'
f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
if w - len(net.nic[:10]) - 20 > 13:
if not "y" in Key.mouse: Key.mouse["y"] = [[x+w - 26 - len(net.nic[:10]) + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, x+w - 27 - len(net.nic[:10]))}{THEME.net_box(Symbol.title_left)}{Fx.b if CONFIG.net_sync else ""}{THEME.title("s")}{THEME.hi_fg("y")}{THEME.title("nc")}'
f'{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
if net.address and w - len(net.nic[:10]) - len(net.address) - 20 > 15:
out_misc += (f'{Mv.to(y-1, x+7)}{THEME.net_box(Symbol.title_left)}{Fx.b}{THEME.title(net.address)}{Fx.ub}{THEME.net_box(Symbol.title_right)}{Term.fg}')
Draw.buffer("net_misc", out_misc, only_save=True)
cy = 0
for direction in ["download", "upload"]:
strings = net.strings[net.nic][direction]
stats = net.stats[net.nic][direction]
if cls.redraw: stats["redraw"] = True
if stats["redraw"] or cls.resized:
Graphs.net[direction] = Graph(w - bw - 3, cls.graph_height[direction], THEME.gradient[direction], stats["speed"], max_value=net.sync_top if CONFIG.net_sync else stats["graph_top"],
invert=direction != "download", color_max_value=net.net_min.get(direction) if CONFIG.net_color_fixed else None, round_up_low=True)
out += f'{Mv.to(y if direction == "download" else y + cls.graph_height["download"], x)}{Graphs.net[direction](None if stats["redraw"] else stats["speed"][-1])}'
out += (f'{Mv.to(by+cy, bx)}{THEME.main_fg}{cls.symbols[direction]} {strings["byte_ps"]:<10.10}' +
("" if bw < 20 else f'{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["bit_ps"] + ")":>12.12}'))
cy += 1 if bh != 3 else 2
if bh >= 6:
out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Top:"}{Mv.to(by+cy, bx+bw - 12)}{"(" + strings["top"] + ")":>12.12}'
cy += 1
if bh >= 4:
out += f'{Mv.to(by+cy, bx)}{cls.symbols[direction]} {"Total:"}{Mv.to(by+cy, bx+bw - 10)}{strings["total"]:>10.10}'
if bh > 2 and bh % 2: cy += 2
else: cy += 1
stats["redraw"] = False
out += (f'{Mv.to(y, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["download"]["graph_top"])}'
f'{Mv.to(y+h-1, x)}{THEME.graph_text(net.sync_string if CONFIG.net_sync else net.strings[net.nic]["upload"]["graph_top"])}')
Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
cls.redraw = cls.resized = False
class ProcBox(Box):
name = "proc"
num = 4
height_p = 68
width_p = 55
min_w: int = 44
min_h: int = 16
x = 1
y = 1
current_y: int = 0
current_h: int = 0
select_max: int = 0
selected: int = 0
selected_pid: int = 0
last_selection: int = 0
filtering: bool = False
moved: bool = False
start: int = 1
count: int = 0
s_len: int = 0
detailed: bool = False
detailed_x: int = 0
detailed_y: int = 0
detailed_width: int = 0
detailed_height: int = 8
resized: bool = True
redraw: bool = True
buffer: str = "proc"
pid_counter: Dict[int, int] = {}
Box.buffers.append(buffer)
@classmethod
def _calc_size(cls):
if not "proc" in cls.boxes:
cls.width = Term.width
return
width_p: int; height_p: int
if not "net" in cls.boxes and not "mem" in cls.boxes:
width_p = 100
else:
width_p = cls.width_p
if not "cpu" in cls.boxes:
height_p = 100
else:
height_p = cls.height_p
cls.width = round(Term.width * width_p / 100)
cls.height = round(Term.height * height_p / 100)
if cls.height + Box._b_cpu_h > Term.height: cls.height = Term.height - Box._b_cpu_h
cls.x = Term.width - cls.width + 1
cls.y = Box._b_cpu_h + 1
cls.current_y = cls.y
cls.current_h = cls.height
cls.select_max = cls.height - 3
cls.redraw = True
cls.resized = True
@classmethod
def _draw_bg(cls) -> str:
if not "proc" in cls.boxes: return ""
return create_box(box=cls, line_color=THEME.proc_box)
@classmethod
def selector(cls, key: str, mouse_pos: Tuple[int, int] = (0, 0)):
old: Tuple[int, int] = (cls.start, cls.selected)
new_sel: int
if key in ["up", "k"]:
if cls.selected == 1 and cls.start > 1:
cls.start -= 1
elif cls.selected == 1:
cls.selected = 0
elif cls.selected > 1:
cls.selected -= 1
elif key in ["down", "j"]:
if cls.selected == 0 and ProcCollector.detailed and cls.last_selection:
cls.selected = cls.last_selection
cls.last_selection = 0
if cls.selected == cls.select_max and cls.start < ProcCollector.num_procs - cls.select_max + 1:
cls.start += 1
elif cls.selected < cls.select_max:
cls.selected += 1
elif key == "mouse_scroll_up" and cls.start > 1:
cls.start -= 5
elif key == "mouse_scroll_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
cls.start += 5
elif key == "page_up" and cls.start > 1:
cls.start -= cls.select_max
elif key == "page_down" and cls.start < ProcCollector.num_procs - cls.select_max + 1:
cls.start += cls.select_max
elif key == "home":
if cls.start > 1: cls.start = 1
elif cls.selected > 0: cls.selected = 0
elif key == "end":
if cls.start < ProcCollector.num_procs - cls.select_max + 1: cls.start = ProcCollector.num_procs - cls.select_max + 1
elif cls.selected < cls.select_max: cls.selected = cls.select_max
elif key == "mouse_click":
if mouse_pos[0] > cls.x + cls.width - 4 and cls.current_y + 1 < mouse_pos[1] < cls.current_y + 1 + cls.select_max + 1:
if mouse_pos[1] == cls.current_y + 2:
cls.start = 1
elif mouse_pos[1] == cls.current_y + 1 + cls.select_max:
cls.start = ProcCollector.num_procs - cls.select_max + 1
else:
cls.start = round((mouse_pos[1] - cls.current_y) * ((ProcCollector.num_procs - cls.select_max - 2) / (cls.select_max - 2)))
else:
new_sel = mouse_pos[1] - cls.current_y - 1 if mouse_pos[1] >= cls.current_y - 1 else 0
if new_sel > 0 and new_sel == cls.selected:
Key.list.insert(0, "enter")
return
elif new_sel > 0 and new_sel != cls.selected:
if cls.last_selection: cls.last_selection = 0
cls.selected = new_sel
elif key == "mouse_unselect":
cls.selected = 0
if cls.start > ProcCollector.num_procs - cls.select_max + 1 and ProcCollector.num_procs > cls.select_max: cls.start = ProcCollector.num_procs - cls.select_max + 1
elif cls.start > ProcCollector.num_procs: cls.start = ProcCollector.num_procs
if cls.start < 1: cls.start = 1
if cls.selected > ProcCollector.num_procs and ProcCollector.num_procs < cls.select_max: cls.selected = ProcCollector.num_procs
elif cls.selected > cls.select_max: cls.selected = cls.select_max
if cls.selected < 0: cls.selected = 0
if old != (cls.start, cls.selected):
cls.moved = True
Collector.collect(ProcCollector, proc_interrupt=True, redraw=True, only_draw=True)
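#* Key handling summary: up/down (or k/j) move the selection one row, mouse
#* scroll moves the view by 5, page up/down by one page, home/end jump to the
#* ends, and clicking an already selected row is treated as "enter".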
@classmethod
def _draw_fg(cls):
if not "proc" in cls.boxes: return
proc = ProcCollector
if proc.proc_interrupt: return
if proc.redraw: cls.redraw = True
out: str = ""
out_misc: str = ""
n: int = 0
x, y, w, h = cls.x + 1, cls.current_y + 1, cls.width - 2, cls.current_h - 2
prog_len: int; arg_len: int; val: int; c_color: str; m_color: str; t_color: str; sort_pos: int; tree_len: int; is_selected: bool; calc: int
dgx: int; dgw: int; dx: int; dw: int; dy: int
l_count: int = 0
scroll_pos: int = 0
killed: bool = True
indent: str = ""
offset: int = 0
tr_show: bool = True
usr_show: bool = True
vals: List[str]
g_color: str = ""
s_len: int = 0
if proc.search_filter: s_len = len(proc.search_filter[:10])
loc_string: str = f'{cls.start + cls.selected - 1}/{proc.num_procs}'
end: str = ""
if proc.detailed:
dgx, dgw = x, w // 3
dw = w - dgw - 1
if dw > 120:
dw = 120
dgw = w - 121
dx = x + dgw + 2
dy = cls.y + 1
if w > 67:
arg_len = w - 53 - (1 if proc.num_procs > cls.select_max else 0)
prog_len = 15
else:
arg_len = 0
prog_len = w - 38 - (1 if proc.num_procs > cls.select_max else 0)
if prog_len < 15:
tr_show = False
prog_len += 5
if prog_len < 12:
usr_show = False
prog_len += 9
if CONFIG.proc_tree:
tree_len = arg_len + prog_len + 6
arg_len = 0
#* Buttons and titles only redrawn if needed
if cls.resized or cls.redraw:
s_len += len(CONFIG.proc_sorting)
if cls.resized or s_len != cls.s_len or proc.detailed:
cls.s_len = s_len
for k in ["e", "r", "c", "T", "K", "I", "enter", "left", " ", "f", "delete"]:
if k in Key.mouse: del Key.mouse[k]
if proc.detailed:
killed = proc.details.get("killed", False)
main = THEME.main_fg if cls.selected == 0 and not killed else THEME.inactive_fg
hi = THEME.hi_fg if cls.selected == 0 and not killed else THEME.inactive_fg
title = THEME.title if cls.selected == 0 and not killed else THEME.inactive_fg
if cls.current_y != cls.y + 8 or cls.resized or Graphs.detailed_cpu is NotImplemented:
cls.current_y = cls.y + 8
cls.current_h = cls.height - 8
for i in range(7): out_misc += f'{Mv.to(dy+i, x)}{" " * w}'
out_misc += (f'{Mv.to(dy+7, x-1)}{THEME.proc_box}{Symbol.title_right}{Symbol.h_line*w}{Symbol.title_left}'
f'{Mv.to(dy+7, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg(SUPERSCRIPT[cls.num])}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}{THEME.div_line}')
for i in range(7):
out_misc += f'{Mv.to(dy + i, dgx + dgw + 1)}{Symbol.v_line}'
out_misc += (f'{Mv.to(dy-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}'
f'{Mv.to(dy-1, dgx + dgw + 1)}{Symbol.div_up}'
f'{Mv.to(dy-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(str(proc.details["pid"]))}{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.title(proc.details["name"][:(dgw - 11)])}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if cls.selected == 0:
Key.mouse["enter"] = [[dx+dw-10 + i, dy-1] for i in range(7)]
if cls.selected == 0 and not killed:
Key.mouse["T"] = [[dx+2 + i, dy-1] for i in range(9)]
out_misc += (f'{Mv.to(dy-1, dx+dw - 11)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{title if cls.selected > 0 else THEME.title}close{Fx.ub} {main if cls.selected > 0 else THEME.main_fg}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}'
f'{Mv.to(dy-1, dx+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}T{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if dw > 28:
if cls.selected == 0 and not killed and not "K" in Key.mouse: Key.mouse["K"] = [[dx + 13 + i, dy-1] for i in range(4)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}K{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if dw > 39:
if cls.selected == 0 and not killed and not "I" in Key.mouse: Key.mouse["I"] = [[dx + 19 + i, dy-1] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}I{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if Graphs.detailed_cpu is NotImplemented or cls.resized:
Graphs.detailed_cpu = Graph(dgw+1, 7, THEME.gradient["cpu"], proc.details_cpu)
Graphs.detailed_mem = Graph(dw // 3, 1, None, proc.details_mem)
cls.select_max = cls.height - 11
y = cls.y + 9
h = cls.height - 10
else:
if cls.current_y != cls.y or cls.resized:
cls.current_y = cls.y
cls.current_h = cls.height
y, h = cls.y + 1, cls.height - 2
out_misc += (f'{Mv.to(y-1, x-1)}{THEME.proc_box}{Symbol.left_up}{Symbol.h_line*w}{Symbol.right_up}'
f'{Mv.to(y-1, x+1)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg(SUPERSCRIPT[cls.num])}{THEME.title(cls.name)}{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
f'{Mv.to(y+7, x-1)}{THEME.proc_box(Symbol.v_line)}{Mv.r(w)}{THEME.proc_box(Symbol.v_line)}')
cls.select_max = cls.height - 3
sort_pos = x + w - len(CONFIG.proc_sorting) - 7
if not "left" in Key.mouse:
Key.mouse["left"] = [[sort_pos + i, y-1] for i in range(3)]
Key.mouse["right"] = [[sort_pos + len(CONFIG.proc_sorting) + 3 + i, y-1] for i in range(3)]
out_misc += (f'{Mv.to(y-1, x + 8)}{THEME.proc_box(Symbol.h_line * (w - 9))}' +
("" if not proc.detailed else f"{Mv.to(dy+7, dgx + dgw + 1)}{THEME.proc_box(Symbol.div_down)}") +
f'{Mv.to(y-1, sort_pos)}{THEME.proc_box(Symbol.title_left)}{Fx.b}{THEME.hi_fg("<")} {THEME.title(CONFIG.proc_sorting)} '
f'{THEME.hi_fg(">")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 29 + s_len:
if not "e" in Key.mouse: Key.mouse["e"] = [[sort_pos - 5 + i, y-1] for i in range(4)]
out_misc += (f'{Mv.to(y-1, sort_pos - 6)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_tree else ""}'
f'{THEME.title("tre")}{THEME.hi_fg("e")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 37 + s_len:
if not "r" in Key.mouse: Key.mouse["r"] = [[sort_pos - 14 + i, y-1] for i in range(7)]
out_misc += (f'{Mv.to(y-1, sort_pos - 15)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_reversed else ""}'
f'{THEME.hi_fg("r")}{THEME.title("everse")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if w > 47 + s_len:
if not "c" in Key.mouse: Key.mouse["c"] = [[sort_pos - 24 + i, y-1] for i in range(8)]
out_misc += (f'{Mv.to(y-1, sort_pos - 25)}{THEME.proc_box(Symbol.title_left)}{Fx.b if CONFIG.proc_per_core else ""}'
f'{THEME.title("per-")}{THEME.hi_fg("c")}{THEME.title("ore")}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
if not "f" in Key.mouse or cls.resized: Key.mouse["f"] = [[x+6 + i, y-1] for i in range(6 if not proc.search_filter else 2 + len(proc.search_filter[-10:]))]
if proc.search_filter:
if not "delete" in Key.mouse: Key.mouse["delete"] = [[x+12 + len(proc.search_filter[-10:]) + i, y-1] for i in range(3)]
elif "delete" in Key.mouse:
del Key.mouse["delete"]
out_misc += (f'{Mv.to(y-1, x + 8)}{THEME.proc_box(Symbol.title_left)}{Fx.b if cls.filtering or proc.search_filter else ""}{THEME.hi_fg("F" if cls.filtering and proc.case_sensitive else "f")}{THEME.title}' +
("ilter" if not proc.search_filter and not cls.filtering else f' {proc.search_filter[-(10 if w < 83 else w - 74):]}{(Fx.bl + "█" + Fx.ubl) if cls.filtering else THEME.hi_fg(" del")}') +
f'{THEME.proc_box(Symbol.title_right)}')
main = THEME.inactive_fg if cls.selected == 0 else THEME.main_fg
hi = THEME.inactive_fg if cls.selected == 0 else THEME.hi_fg
title = THEME.inactive_fg if cls.selected == 0 else THEME.title
out_misc += (f'{Mv.to(y+h, x + 1)}{THEME.proc_box}{Symbol.h_line*(w-4)}'
f'{Mv.to(y+h, x+1)}{THEME.proc_box(Symbol.title_left)}{main}{Symbol.up} {Fx.b}{THEME.main_fg("select")} {Fx.ub}'
f'{THEME.inactive_fg if cls.selected == cls.select_max else THEME.main_fg}{Symbol.down}{THEME.proc_box(Symbol.title_right)}'
f'{THEME.proc_box(Symbol.title_left)}{title}{Fx.b}info {Fx.ub}{main}{Symbol.enter}{THEME.proc_box(Symbol.title_right)}')
if not "enter" in Key.mouse: Key.mouse["enter"] = [[x + 14 + i, y+h] for i in range(6)]
if w - len(loc_string) > 34:
if not "T" in Key.mouse: Key.mouse["T"] = [[x + 22 + i, y+h] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}T{title}erminate{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if w - len(loc_string) > 40:
if not "K" in Key.mouse: Key.mouse["K"] = [[x + 33 + i, y+h] for i in range(4)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}K{title}ill{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if w - len(loc_string) > 51:
if not "I" in Key.mouse: Key.mouse["I"] = [[x + 39 + i, y+h] for i in range(9)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}I{title}nterrupt{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
if CONFIG.proc_tree and w - len(loc_string) > 65:
if not " " in Key.mouse: Key.mouse[" "] = [[x + 50 + i, y+h] for i in range(12)]
out_misc += f'{THEME.proc_box(Symbol.title_left)}{Fx.b}{hi}spc {title}collapse{Fx.ub}{THEME.proc_box(Symbol.title_right)}'
#* Processes labels
selected: str = CONFIG.proc_sorting
label: str
if selected == "memory": selected = "mem"
if selected == "threads" and not CONFIG.proc_tree and not arg_len: selected = "tr"
if CONFIG.proc_tree:
label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{" Tree:":<{tree_len-2}}' + (f'{"Threads: ":<9}' if tr_show else " "*4) + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' +
(" " if proc.num_procs > cls.select_max else ""))
if selected in ["pid", "program", "arguments"]: selected = "tree"
else:
label = (f'{THEME.title}{Fx.b}{Mv.to(y, x)}{"Pid:":>7} {"Program:" if prog_len > 8 else "Prg:":<{prog_len}}' + (f'{"Arguments:":<{arg_len-4}}' if arg_len else "") +
((f'{"Threads:":<9}' if arg_len else f'{"Tr:":^5}') if tr_show else "") + (f'{"User:":<9}' if usr_show else "") + f'Mem%{"Cpu%":>11}{Fx.ub}{THEME.main_fg} ' +
(" " if proc.num_procs > cls.select_max else ""))
if selected == "program" and prog_len <= 8: selected = "prg"
selected = selected.split(" ")[0].capitalize()
if CONFIG.proc_mem_bytes: label = label.replace("Mem%", "MemB")
label = label.replace(selected, f'{Fx.u}{selected}{Fx.uu}')
out_misc += label
Draw.buffer("proc_misc", out_misc, only_save=True)
#* Detailed box draw
if proc.detailed:
if proc.details["status"] == psutil.STATUS_RUNNING: stat_color = Fx.b
elif proc.details["status"] in [psutil.STATUS_DEAD, psutil.STATUS_STOPPED, psutil.STATUS_ZOMBIE]: stat_color = f'{THEME.inactive_fg}'
else: stat_color = ""
expand = proc.expand
iw = (dw - 3) // (4 + expand)
iw2 = iw - 1
out += (f'{Mv.to(dy, dgx)}{Graphs.detailed_cpu(None if cls.moved or proc.details["killed"] else proc.details_cpu[-1])}'
f'{Mv.to(dy, dgx)}{THEME.title}{Fx.b}{0 if proc.details["killed"] else proc.details["cpu_percent"]}%{Mv.r(1)}{"" if SYSTEM == "MacOS" else (("C" if dgw < 20 else "Core") + str(proc.details["cpu_num"]))}')
for i, l in enumerate(["C", "P", "U"]):
out += f'{Mv.to(dy+2+i, dgx)}{l}'
for i, l in enumerate(["C", "M", "D"]):
out += f'{Mv.to(dy+4+i, dx+1)}{l}'
out += (f'{Mv.to(dy, dx+1)} {"Status:":^{iw}.{iw2}}{"Elapsed:":^{iw}.{iw2}}' +
(f'{"Parent:":^{iw}.{iw2}}' if dw > 28 else "") + (f'{"User:":^{iw}.{iw2}}' if dw > 38 else "") +
(f'{"Threads:":^{iw}.{iw2}}' if expand > 0 else "") + (f'{"Nice:":^{iw}.{iw2}}' if expand > 1 else "") +
(f'{"IO Read:":^{iw}.{iw2}}' if expand > 2 else "") + (f'{"IO Write:":^{iw}.{iw2}}' if expand > 3 else "") +
(f'{"TTY:":^{iw}.{iw2}}' if expand > 4 else "") +
f'{Mv.to(dy+1, dx+1)}{Fx.ub}{THEME.main_fg}{stat_color}{proc.details["status"]:^{iw}.{iw2}}{Fx.ub}{THEME.main_fg}{proc.details["uptime"]:^{iw}.{iw2}} ' +
(f'{proc.details["parent_name"]:^{iw}.{iw2}}' if dw > 28 else "") + (f'{proc.details["username"]:^{iw}.{iw2}}' if dw > 38 else "") +
(f'{proc.details["threads"]:^{iw}.{iw2}}' if expand > 0 else "") + (f'{proc.details["nice"]:^{iw}.{iw2}}' if expand > 1 else "") +
(f'{proc.details["io_read"]:^{iw}.{iw2}}' if expand > 2 else "") + (f'{proc.details["io_write"]:^{iw}.{iw2}}' if expand > 3 else "") +
(f'{proc.details["terminal"][-(iw2):]:^{iw}.{iw2}}' if expand > 4 else "") +
f'{Mv.to(dy+3, dx)}{THEME.title}{Fx.b}{("Memory: " if dw > 42 else "M:") + str(round(proc.details["memory_percent"], 1)) + "%":>{dw//3-1}}{Fx.ub} {THEME.inactive_fg}{"⡀"*(dw//3)}'
f'{Mv.l(dw//3)}{THEME.proc_misc}{Graphs.detailed_mem(None if cls.moved else proc.details_mem[-1])} '
f'{THEME.title}{Fx.b}{proc.details["memory_bytes"]:.{dw//3 - 2}}{THEME.main_fg}{Fx.ub}')
cy = dy + (4 if len(proc.details["cmdline"]) > dw - 5 else 5)
for i in range(ceil(len(proc.details["cmdline"]) / (dw - 5))):
out += f'{Mv.to(cy+i, dx + 3)}{proc.details["cmdline"][((dw-5)*i):][:(dw-5)]:{"^" if i == 0 else "<"}{dw-5}}'
if i == 2: break
#* Checking for selection out of bounds
if cls.start > proc.num_procs - cls.select_max + 1 and proc.num_procs > cls.select_max: cls.start = proc.num_procs - cls.select_max + 1
elif cls.start > proc.num_procs: cls.start = proc.num_procs
if cls.start < 1: cls.start = 1
if cls.selected > proc.num_procs and proc.num_procs < cls.select_max: cls.selected = proc.num_procs
elif cls.selected > cls.select_max: cls.selected = cls.select_max
if cls.selected < 0: cls.selected = 0
#* Start iteration over all processes and info
cy = 1
for n, (pid, items) in enumerate(proc.processes.items(), start=1):
if n < cls.start: continue
l_count += 1
if l_count == cls.selected:
is_selected = True
cls.selected_pid = pid
else: is_selected = False
indent, name, cmd, threads, username, mem, mem_b, cpu = [items.get(v, d) for v, d in [("indent", ""), ("name", ""), ("cmd", ""), ("threads", 0), ("username", "?"), ("mem", 0.0), ("mem_b", 0), ("cpu", 0.0)]]
if CONFIG.proc_tree:
arg_len = 0
offset = tree_len - len(f'{indent}{pid}')
if offset < 1: offset = 0
indent = f'{indent:.{tree_len - len(str(pid))}}'
if offset - len(name) > 12:
cmd = cmd.split(" ")[0].split("/")[-1]
if not cmd.startswith(name):
offset = len(name)
arg_len = tree_len - len(f'{indent}{pid} {name} ') + 2
cmd = f'({cmd[:(arg_len-4)]})'
else:
offset = prog_len - 1
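#* Per-process mini cpu graph lifecycle: a graph is created once usage passes 1%,
#* then both graph and counter are dropped after 10 consecutive updates below 1%
#* to keep memory usage bounded.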
if cpu > 1.0 or pid in Graphs.pid_cpu:
if pid not in Graphs.pid_cpu:
Graphs.pid_cpu[pid] = Graph(5, 1, None, [0])
cls.pid_counter[pid] = 0
elif cpu < 1.0:
cls.pid_counter[pid] += 1
if cls.pid_counter[pid] > 10:
del cls.pid_counter[pid], Graphs.pid_cpu[pid]
else:
cls.pid_counter[pid] = 0
end = f'{THEME.main_fg}{Fx.ub}' if CONFIG.proc_colors else Fx.ub
if cls.selected > cy: calc = cls.selected - cy
elif 0 < cls.selected <= cy: calc = cy - cls.selected
else: calc = cy
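#* calc = distance of this row from the current selection (or the row index when
#* nothing is selected), used below to fade the color gradient.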
if CONFIG.proc_colors and not is_selected:
vals = []
for v in [int(cpu), int(mem), int(threads // 3)]:
if CONFIG.proc_gradient:
val = ((v if v <= 100 else 100) + 100) - calc * 100 // cls.select_max
vals += [f'{THEME.gradient["proc_color" if val < 100 else "process"][val if val < 100 else val - 100]}']
else:
vals += [f'{THEME.gradient["process"][v if v <= 100 else 100]}']
c_color, m_color, t_color = vals
else:
c_color = m_color = t_color = Fx.b
if CONFIG.proc_gradient and not is_selected:
g_color = f'{THEME.gradient["proc"][calc * 100 // cls.select_max]}'
if is_selected:
c_color = m_color = t_color = g_color = end = ""
out += f'{THEME.selected_bg}{THEME.selected_fg}{Fx.b}'
#* Creates one line for a process with all gathered information
out += (f'{Mv.to(y+cy, x)}{g_color}{indent}{pid:>{(1 if CONFIG.proc_tree else 7)}} ' +
f'{c_color}{name:<{offset}.{offset}} {end}' +
(f'{g_color}{cmd:<{arg_len}.{arg_len-1}}' if arg_len else "") +
(t_color + (f'{threads:>4} ' if threads < 1000 else "999> ") + end if tr_show else "") +
(g_color + (f'{username:<9.9}' if len(username) < 10 else f'{username[:8]:<8}+') if usr_show else "") +
m_color + ((f'{mem:>4.1f}' if mem < 100 else f'{mem:>4.0f} ') if not CONFIG.proc_mem_bytes else f'{floating_humanizer(mem_b, short=True):>4.4}') + end +
f' {THEME.inactive_fg}{"⡀"*5}{THEME.main_fg}{g_color}{c_color}' + (f' {cpu:>4.1f} ' if cpu < 100 else f'{cpu:>5.0f} ') + end +
(" " if proc.num_procs > cls.select_max else ""))
#* Draw small cpu graph for process if cpu usage was above 1% in the last 10 updates
if pid in Graphs.pid_cpu:
out += f'{Mv.to(y+cy, x + w - (12 if proc.num_procs > cls.select_max else 11))}{c_color if CONFIG.proc_colors else THEME.proc_misc}{Graphs.pid_cpu[pid](None if cls.moved else round(cpu))}{THEME.main_fg}'
if is_selected: out += f'{Fx.ub}{Term.fg}{Term.bg}{Mv.to(y+cy, x + w - 1)}{" " if proc.num_procs > cls.select_max else ""}'
cy += 1
if cy == h: break
if cy < h:
for i in range(h-cy):
out += f'{Mv.to(y+cy+i, x)}{" " * w}'
#* Draw scrollbar if needed
if proc.num_procs > cls.select_max:
if cls.resized:
Key.mouse["mouse_scroll_up"] = [[x+w-2+i, y] for i in range(3)]
Key.mouse["mouse_scroll_down"] = [[x+w-2+i, y+h-1] for i in range(3)]
scroll_pos = round(cls.start * (cls.select_max - 2) / (proc.num_procs - (cls.select_max - 2)))
if scroll_pos < 0 or cls.start == 1: scroll_pos = 0
elif scroll_pos > h - 3 or cls.start >= proc.num_procs - cls.select_max: scroll_pos = h - 3
out += (f'{Mv.to(y, x+w-1)}{Fx.b}{THEME.main_fg}↑{Mv.to(y+h-1, x+w-1)}↓{Fx.ub}'
f'{Mv.to(y+1+scroll_pos, x+w-1)}█')
elif "scroll_up" in Key.mouse:
del Key.mouse["scroll_up"], Key.mouse["scroll_down"]
#* Draw current selection and number of processes
out += (f'{Mv.to(y+h, x + w - 3 - len(loc_string))}{THEME.proc_box}{Symbol.title_left}{THEME.title}'
f'{Fx.b}{loc_string}{Fx.ub}{THEME.proc_box(Symbol.title_right)}')
#* Clean up dead processes graphs and counters
cls.count += 1
if cls.count == 100:
cls.count = 0
for p in list(cls.pid_counter):
if not psutil.pid_exists(p):
del cls.pid_counter[p], Graphs.pid_cpu[p]
Draw.buffer(cls.buffer, f'{out_misc}{out}{Term.fg}', only_save=Menu.active)
cls.redraw = cls.resized = cls.moved = False
class Collector:
'''Data collector master class
* .start(): Starts collector thread
* .stop(): Stops collector thread
* .collect(*collectors: Collector, draw_now: bool = True, interrupt: bool = False): queues up collectors to run'''
stopping: bool = False
started: bool = False
draw_now: bool = False
redraw: bool = False
only_draw: bool = False
thread: threading.Thread
collect_run = threading.Event()
collect_idle = threading.Event()
collect_idle.set()
collect_done = threading.Event()
collect_queue: List = []
collect_interrupt: bool = False
proc_interrupt: bool = False
use_draw_list: bool = False
proc_counter: int = 1
@classmethod
def start(cls):
cls.stopping = False
cls.thread = threading.Thread(target=cls._runner, args=())
cls.thread.start()
cls.started = True
@classmethod
def stop(cls):
if cls.started and cls.thread.is_alive():
cls.stopping = True
cls.started = False
cls.collect_queue = []
cls.collect_idle.set()
cls.collect_done.set()
try:
cls.thread.join()
except:
pass
@classmethod
def _runner(cls):
'''This is meant to run in its own thread, collecting and drawing when collect_run is set'''
draw_buffers: List[str] = []
debugged: bool = False
try:
while not cls.stopping:
if CONFIG.draw_clock and CONFIG.update_ms != 1000: Box.draw_clock()
cls.collect_run.wait(0.1)
if not cls.collect_run.is_set():
continue
draw_buffers = []
cls.collect_interrupt = False
cls.collect_run.clear()
cls.collect_idle.clear()
cls.collect_done.clear()
if DEBUG and not debugged: TimeIt.start("Collect and draw")
while cls.collect_queue:
collector = cls.collect_queue.pop()
if not cls.only_draw:
collector._collect()
collector._draw()
if cls.use_draw_list: draw_buffers.append(collector.buffer)
if cls.collect_interrupt: break
if DEBUG and not debugged: TimeIt.stop("Collect and draw"); debugged = True
if cls.draw_now and not Menu.active and not cls.collect_interrupt:
if cls.use_draw_list: Draw.out(*draw_buffers)
else: Draw.out()
if CONFIG.draw_clock and CONFIG.update_ms == 1000: Box.draw_clock()
cls.collect_idle.set()
cls.collect_done.set()
except Exception as e:
errlog.exception(f'Data collection thread failed with exception: {e}')
cls.collect_idle.set()
cls.collect_done.set()
clean_quit(1, thread=True)
@classmethod
def collect(cls, *collectors, draw_now: bool = True, interrupt: bool = False, proc_interrupt: bool = False, redraw: bool = False, only_draw: bool = False):
'''Set up the collect queue for _runner'''
cls.collect_interrupt = interrupt
cls.proc_interrupt = proc_interrupt
cls.collect_idle.wait()
cls.collect_interrupt = False
cls.proc_interrupt = False
cls.use_draw_list = False
cls.draw_now = draw_now
cls.redraw = redraw
cls.only_draw = only_draw
if collectors:
cls.collect_queue = [*collectors]
cls.use_draw_list = True
if ProcCollector in cls.collect_queue:
cls.proc_counter = 1
else:
cls.collect_queue = list(cls.__subclasses__())
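#* With proc_update_mult > 1 the process list is refreshed only every Nth full
#* update: ProcCollector is dropped from the queue until proc_counter wraps.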
if CONFIG.proc_update_mult > 1:
if cls.proc_counter > 1:
cls.collect_queue.remove(ProcCollector)
if cls.proc_counter == CONFIG.proc_update_mult:
cls.proc_counter = 0
cls.proc_counter += 1
cls.collect_run.set()
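#* Illustrative usage (as done by NetCollector.switch and ProcCollector.sorting):
#*   Collector.collect(NetCollector, redraw=True)
#* queues a single collector; calling collect() with no collectors queues every
#* Collector subclass for a full update cycle.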
class CpuCollector(Collector):
'''Collects cpu usage for cpu and cores, cpu frequency, load_avg, uptime and cpu temps'''
cpu_usage: List[List[int]] = []
cpu_upper: List[int] = []
cpu_lower: List[int] = []
cpu_temp: List[List[int]] = []
cpu_temp_high: int = 0
cpu_temp_crit: int = 0
for _ in range(THREADS + 1):
cpu_usage.append([])
cpu_temp.append([])
freq_error: bool = False
cpu_freq: int = 0
load_avg: List[float] = []
uptime: str = ""
buffer: str = CpuBox.buffer
sensor_method: str = ""
got_sensors: bool = False
sensor_swap: bool = False
cpu_temp_only: bool = False
@classmethod
def get_sensors(cls):
'''Check if we can get cpu temps and set the method used for getting temps'''
cls.sensor_method = ""
if SYSTEM == "MacOS":
try:
if which("coretemp") and subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip().replace("-", "").isdigit():
cls.sensor_method = "coretemp"
elif which("osx-cpu-temp") and subprocess.check_output("osx-cpu-temp", universal_newlines=True).rstrip().endswith("°C"):
cls.sensor_method = "osx-cpu-temp"
except: pass
elif CONFIG.cpu_sensor != "Auto" and CONFIG.cpu_sensor in CONFIG.cpu_sensors:
cls.sensor_method = "psutil"
elif hasattr(psutil, "sensors_temperatures"):
try:
temps = psutil.sensors_temperatures()
if temps:
for name, entries in temps.items():
if name.lower().startswith("cpu"):
cls.sensor_method = "psutil"
break
for entry in entries:
if entry.label.startswith(("Package", "Core 0", "Tdie", "CPU")):
cls.sensor_method = "psutil"
break
except: pass
if not cls.sensor_method and SYSTEM == "Linux":
try:
if which("vcgencmd") and subprocess.check_output(["vcgencmd", "measure_temp"], universal_newlines=True).strip().endswith("'C"):
cls.sensor_method = "vcgencmd"
except: pass
cls.got_sensors = bool(cls.sensor_method)
@classmethod
def _collect(cls):
cls.cpu_usage[0].append(ceil(psutil.cpu_percent(percpu=False)))
if len(cls.cpu_usage[0]) > Term.width * 4:
del cls.cpu_usage[0][0]
cpu_times_percent = psutil.cpu_times_percent()
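#* Fill the upper/lower cpu graph histories: "total" aliases the combined usage
#* list, any other setting appends that field of psutil.cpu_times_percent();
#* history length is capped at four times the terminal width.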
for x in ["upper", "lower"]:
if getattr(CONFIG, "cpu_graph_" + x) == "total":
setattr(cls, "cpu_" + x, cls.cpu_usage[0])
else:
getattr(cls, "cpu_" + x).append(ceil(getattr(cpu_times_percent, getattr(CONFIG, "cpu_graph_" + x))))
if len(getattr(cls, "cpu_" + x)) > Term.width * 4:
del getattr(cls, "cpu_" + x)[0]
for n, thread in enumerate(psutil.cpu_percent(percpu=True), start=1):
cls.cpu_usage[n].append(ceil(thread))
if len(cls.cpu_usage[n]) > Term.width * 2:
del cls.cpu_usage[n][0]
try:
if hasattr(psutil.cpu_freq(), "current"):
cls.cpu_freq = round(psutil.cpu_freq().current)
except Exception as e:
if not cls.freq_error:
cls.freq_error = True
errlog.error("Exception while getting cpu frequency!")
errlog.exception(f'{e}')
cls.load_avg = [round(lavg, 2) for lavg in psutil.getloadavg()]
cls.uptime = str(timedelta(seconds=round(time()-psutil.boot_time(),0)))[:-3].replace(" days,", "d").replace(" day,", "d")
if CONFIG.check_temp and cls.got_sensors:
cls._collect_temps()
@classmethod
def _collect_temps(cls):
temp: int = 1000
cores: List[int] = []
core_dict: Dict[int, int] = {}
entry_int: int = 0
cpu_type: str = ""
c_max: int = 0
s_name: str = "_-_"
s_label: str = "_-_"
if cls.sensor_method == "psutil":
try:
if CONFIG.cpu_sensor != "Auto":
s_name, s_label = CONFIG.cpu_sensor.split(":", 1)
for name, entries in psutil.sensors_temperatures().items():
for num, entry in enumerate(entries, 1):
if name == s_name and (entry.label == s_label or str(num) == s_label):
if entry.label.startswith("Package"):
cpu_type = "intel"
elif entry.label.startswith("Tdie"):
cpu_type = "ryzen"
else:
cpu_type = "other"
if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
else: cls.cpu_temp_high = 80
if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
else: cls.cpu_temp_crit = 95
temp = round(entry.current)
elif entry.label.startswith(("Package", "Tdie")) and cpu_type in ["", "other"] and s_name == "_-_" and hasattr(entry, "current"):
if not cls.cpu_temp_high or cls.sensor_swap or cpu_type == "other":
cls.sensor_swap = False
if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
else: cls.cpu_temp_high = 80
if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
else: cls.cpu_temp_crit = 95
cpu_type = "intel" if entry.label.startswith("Package") else "ryzen"
temp = round(entry.current)
elif (entry.label.startswith(("Core", "Tccd", "CPU")) or (name.lower().startswith("cpu") and not entry.label)) and hasattr(entry, "current"):
if entry.label.startswith(("Core", "Tccd")):
entry_int = int(entry.label.replace("Core", "").replace("Tccd", ""))
if entry_int in core_dict and cpu_type != "ryzen":
if c_max == 0:
c_max = max(core_dict) + 1
if c_max < THREADS // 2 and (entry_int + c_max) not in core_dict:
core_dict[(entry_int + c_max)] = round(entry.current)
continue
elif entry_int in core_dict:
continue
core_dict[entry_int] = round(entry.current)
continue
elif cpu_type in ["intel", "ryzen"]:
continue
if not cpu_type:
cpu_type = "other"
if not cls.cpu_temp_high or cls.sensor_swap:
cls.sensor_swap = False
if getattr(entry, "high", None) != None and entry.high > 1: cls.cpu_temp_high = round(entry.high)
else: cls.cpu_temp_high = 60 if name == "cpu_thermal" else 80
if getattr(entry, "critical", None) != None and entry.critical > 1: cls.cpu_temp_crit = round(entry.critical)
else: cls.cpu_temp_crit = 80 if name == "cpu_thermal" else 95
temp = round(entry.current)
cores.append(round(entry.current))
if core_dict:
if not temp or temp == 1000:
temp = sum(core_dict.values()) // len(core_dict)
if not cls.cpu_temp_high or not cls.cpu_temp_crit:
cls.cpu_temp_high, cls.cpu_temp_crit = 80, 95
cls.cpu_temp[0].append(temp)
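#* Ryzen reports one "Tccd" temp per CCD instead of per core: spread each CCD's
#* reading over its (CORES // ccds) cores via CORE_MAP, restarting the walk at
#* x == CORES so the second half of THREADS (SMT siblings) gets the same temps.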
if cpu_type == "ryzen":
ccds: int = len(core_dict)
cores_per_ccd: int = CORES // ccds
z: int = 1
for x in range(THREADS):
if x == CORES:
z = 1
if CORE_MAP[x] + 1 > cores_per_ccd * z:
z += 1
if z in core_dict:
cls.cpu_temp[x+1].append(core_dict[z])
else:
for x in range(THREADS):
if CORE_MAP[x] in core_dict:
cls.cpu_temp[x+1].append(core_dict[CORE_MAP[x]])
elif len(cores) == THREADS / 2:
cls.cpu_temp[0].append(temp)
for n, t in enumerate(cores, start=1):
try:
cls.cpu_temp[n].append(t)
cls.cpu_temp[THREADS // 2 + n].append(t)
except IndexError:
break
else:
cls.cpu_temp[0].append(temp)
if len(cores) > 1:
for n, t in enumerate(cores, start=1):
try:
cls.cpu_temp[n].append(t)
except IndexError:
break
except Exception as e:
errlog.exception(f'{e}')
cls.got_sensors = False
CpuBox._calc_size()
else:
try:
if cls.sensor_method == "coretemp":
temp = max(0, int(subprocess.check_output(["coretemp", "-p"], universal_newlines=True).strip()))
cores = [max(0, int(x)) for x in subprocess.check_output("coretemp", universal_newlines=True).split()]
if len(cores) == THREADS / 2:
cls.cpu_temp[0].append(temp)
for n, t in enumerate(cores, start=1):
try:
cls.cpu_temp[n].append(t)
cls.cpu_temp[THREADS // 2 + n].append(t)
except IndexError:
break
else:
cores.insert(0, temp)
for n, t in enumerate(cores):
try:
cls.cpu_temp[n].append(t)
except IndexError:
break
if not cls.cpu_temp_high:
cls.cpu_temp_high = 85
cls.cpu_temp_crit = 100
elif cls.sensor_method == "osx-cpu-temp":
temp = max(0, round(float(subprocess.check_output("osx-cpu-temp", universal_newlines=True).strip()[:-2])))
if not cls.cpu_temp_high:
cls.cpu_temp_high = 85
cls.cpu_temp_crit = 100
elif cls.sensor_method == "vcgencmd":
temp = max(0, round(float(subprocess.check_output(["vcgencmd", "measure_temp"], universal_newlines=True).strip()[5:-2])))
if not cls.cpu_temp_high:
cls.cpu_temp_high = 60
cls.cpu_temp_crit = 80
except Exception as e:
errlog.exception(f'{e}')
cls.got_sensors = False
CpuBox._calc_size()
else:
if not cores:
cls.cpu_temp[0].append(temp)
if not core_dict and len(cores) <= 1:
cls.cpu_temp_only = True
if len(cls.cpu_temp[0]) > 5:
for n in range(len(cls.cpu_temp)):
if cls.cpu_temp[n]:
del cls.cpu_temp[n][0]
@classmethod
def _draw(cls):
CpuBox._draw_fg()
class MemCollector(Collector):
'''Collects memory and disk information'''
values: Dict[str, int] = {}
vlist: Dict[str, List[int]] = {}
percent: Dict[str, int] = {}
string: Dict[str, str] = {}
swap_values: Dict[str, int] = {}
swap_vlist: Dict[str, List[int]] = {}
swap_percent: Dict[str, int] = {}
swap_string: Dict[str, str] = {}
disks: Dict[str, Dict]
disk_hist: Dict[str, Tuple] = {}
timestamp: float = time()
disks_io_dict: Dict[str, Dict[str, List[int]]] = {}
recheck_diskutil: bool = True
diskutil_map: Dict[str, str] = {}
io_error: bool = False
old_disks: List[str] = []
old_io_disks: List[str] = []
fstab_filter: List[str] = []
excludes: List[str] = ["squashfs", "nullfs"]
if SYSTEM == "BSD": excludes += ["devfs", "tmpfs", "procfs", "linprocfs", "gvfs", "fusefs"]
buffer: str = MemBox.buffer
@classmethod
def _collect(cls):
#* Collect memory
mem = psutil.virtual_memory()
if hasattr(mem, "cached"):
cls.values["cached"] = mem.cached
else:
cls.values["cached"] = mem.active
cls.values["total"], cls.values["free"], cls.values["available"] = mem.total, mem.free, mem.available
cls.values["used"] = cls.values["total"] - cls.values["available"]
for key, value in cls.values.items():
cls.string[key] = floating_humanizer(value)
if key == "total": continue
cls.percent[key] = round(value * 100 / cls.values["total"])
if CONFIG.mem_graphs:
if not key in cls.vlist: cls.vlist[key] = []
cls.vlist[key].append(cls.percent[key])
if len(cls.vlist[key]) > MemBox.width: del cls.vlist[key][0]
#* Collect swap
if CONFIG.show_swap or CONFIG.swap_disk:
swap = psutil.swap_memory()
cls.swap_values["total"], cls.swap_values["free"] = swap.total, swap.free
cls.swap_values["used"] = cls.swap_values["total"] - cls.swap_values["free"]
if swap.total:
if not MemBox.swap_on:
MemBox.redraw = True
MemBox.swap_on = True
for key, value in cls.swap_values.items():
cls.swap_string[key] = floating_humanizer(value)
if key == "total": continue
cls.swap_percent[key] = round(value * 100 / cls.swap_values["total"])
if CONFIG.mem_graphs:
if not key in cls.swap_vlist: cls.swap_vlist[key] = []
cls.swap_vlist[key].append(cls.swap_percent[key])
if len(cls.swap_vlist[key]) > MemBox.width: del cls.swap_vlist[key][0]
else:
if MemBox.swap_on:
MemBox.redraw = True
MemBox.swap_on = False
else:
if MemBox.swap_on:
MemBox.redraw = True
MemBox.swap_on = False
if not CONFIG.show_disks: return
#* Collect disks usage
disk_read: int = 0
disk_write: int = 0
dev_name: str
disk_name: str
filtering: Tuple = ()
filter_exclude: bool = False
io_string_r: str
io_string_w: str
u_percent: int
cls.disks = {}
if CONFIG.disks_filter:
if CONFIG.disks_filter.startswith("exclude="):
filter_exclude = True
filtering = tuple(v.strip() for v in CONFIG.disks_filter.replace("exclude=", "").strip().split(","))
else:
filtering = tuple(v.strip() for v in CONFIG.disks_filter.strip().split(","))
try:
io_counters = psutil.disk_io_counters(perdisk=SYSTEM != "BSD", nowrap=True)
except ValueError as e:
if not cls.io_error:
cls.io_error = True
errlog.error('Non-fatal error during disk io collection!')
if psutil.version_info[0] < 5 or (psutil.version_info[0] == 5 and psutil.version_info[1] < 7):
errlog.error(f'Caused by outdated psutil version.')
errlog.exception(f'{e}')
io_counters = None
if SYSTEM == "MacOS" and cls.recheck_diskutil:
cls.recheck_diskutil = False
try:
dutil_out = subprocess.check_output(["diskutil", "list", "physical"], universal_newlines=True)
xdisk = ydisk = ""  # initialize so the "if xdisk and ydisk" check can't hit unbound names
for line in dutil_out.split("\n"):
line = line.replace("\u2068", "").replace("\u2069", "")
if line.startswith("/dev/"):
xdisk = line.split()[0].replace("/dev/", "")
elif "Container" in line:
ydisk = line.split()[3]
if xdisk and ydisk:
cls.diskutil_map[xdisk] = ydisk
xdisk = ydisk = ""
except:
pass
if CONFIG.use_fstab and SYSTEM != "MacOS" and not cls.fstab_filter:
try:
with open('/etc/fstab','r') as fstab:
for line in fstab:
line = line.strip()
if line and not line.startswith('#'):
mount_data = (line.split())
if mount_data[2].lower() != "swap":
cls.fstab_filter += [mount_data[1]]
errlog.debug(f'new fstab_filter set: {cls.fstab_filter}')
except IOError:
CONFIG.use_fstab = False
errlog.warning(f'Error reading fstab, use_fstab flag reset to {CONFIG.use_fstab}')
if not CONFIG.use_fstab and cls.fstab_filter:
cls.fstab_filter = []
errlog.debug(f'use_fstab flag has been turned to {CONFIG.use_fstab}, fstab_filter cleared')
for disk in psutil.disk_partitions(all=CONFIG.use_fstab or not CONFIG.only_physical):
disk_io = None
io_string_r = io_string_w = ""
if CONFIG.use_fstab and disk.mountpoint not in cls.fstab_filter:
continue
disk_name = disk.mountpoint.rsplit('/', 1)[-1] if not disk.mountpoint == "/" else "root"
if cls.excludes and disk.fstype in cls.excludes:
continue
if filtering and ((not filter_exclude and not disk.mountpoint in filtering) or (filter_exclude and disk.mountpoint in filtering)):
continue
if SYSTEM == "MacOS" and disk.mountpoint == "/private/var/vm":
continue
try:
disk_u = psutil.disk_usage(disk.mountpoint)
except:
disk_u = None  # the getattr() calls below fall back to 0 for a failed disk_usage()
u_percent = round(getattr(disk_u, "percent", 0))
cls.disks[disk.device] = { "name" : disk_name, "used_percent" : u_percent, "free_percent" : 100 - u_percent }
for name in ["total", "used", "free"]:
cls.disks[disk.device][name] = floating_humanizer(getattr(disk_u, name, 0))
#* Collect disk io
if io_counters:
try:
if SYSTEM != "BSD":
dev_name = os.path.realpath(disk.device).rsplit('/', 1)[-1]
if not dev_name in io_counters:
for names in io_counters:
if names in dev_name:
disk_io = io_counters[names]
break
else:
if cls.diskutil_map:
for names, items in cls.diskutil_map.items():
if items in dev_name and names in io_counters:
disk_io = io_counters[names]
else:
disk_io = io_counters[dev_name]
elif disk.mountpoint == "/":
disk_io = io_counters
else:
raise Exception
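#* Speeds are deltas of the cumulative io counters divided by elapsed time; the
#* first pass for a device raises KeyError on disk_hist (caught below, zeroing
#* the speeds) and values are shifted >> 20 to store MiB for the graphs.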
disk_read = round((disk_io.read_bytes - cls.disk_hist[disk.device][0]) / (time() - cls.timestamp)) #type: ignore
disk_write = round((disk_io.write_bytes - cls.disk_hist[disk.device][1]) / (time() - cls.timestamp)) #type: ignore
if not disk.device in cls.disks_io_dict:
cls.disks_io_dict[disk.device] = {"read" : [], "write" : [], "rw" : []}
cls.disks_io_dict[disk.device]["read"].append(disk_read >> 20)
cls.disks_io_dict[disk.device]["write"].append(disk_write >> 20)
cls.disks_io_dict[disk.device]["rw"].append((disk_read + disk_write) >> 20)
if len(cls.disks_io_dict[disk.device]["read"]) > MemBox.width:
del cls.disks_io_dict[disk.device]["read"][0], cls.disks_io_dict[disk.device]["write"][0], cls.disks_io_dict[disk.device]["rw"][0]
except:
disk_read = disk_write = 0
else:
disk_read = disk_write = 0
if disk_io:
cls.disk_hist[disk.device] = (disk_io.read_bytes, disk_io.write_bytes)
if CONFIG.io_mode or MemBox.disks_width > 30:
if disk_read > 0:
io_string_r = f'▲{floating_humanizer(disk_read, short=True)}'
if disk_write > 0:
io_string_w = f'▼{floating_humanizer(disk_write, short=True)}'
if CONFIG.io_mode:
cls.disks[disk.device]["io_r"] = io_string_r
cls.disks[disk.device]["io_w"] = io_string_w
elif disk_read + disk_write > 0:
io_string_r += f'▼▲{floating_humanizer(disk_read + disk_write, short=True)}'
cls.disks[disk.device]["io"] = io_string_r + (" " if io_string_w and io_string_r else "") + io_string_w
if CONFIG.swap_disk and MemBox.swap_on:
cls.disks["__swap"] = { "name" : "swap", "used_percent" : cls.swap_percent["used"], "free_percent" : cls.swap_percent["free"], "io" : "" }
for name in ["total", "used", "free"]:
cls.disks["__swap"][name] = cls.swap_string[name]
if len(cls.disks) > 2:
try:
new = { list(cls.disks)[0] : cls.disks.pop(list(cls.disks)[0])}
new["__swap"] = cls.disks.pop("__swap")
new.update(cls.disks)
cls.disks = new
except:
pass
if cls.old_disks != list(cls.disks) or cls.old_io_disks != list(cls.disks_io_dict):
MemBox.redraw = True
cls.recheck_diskutil = True
cls.old_disks = list(cls.disks)
cls.old_io_disks = list(cls.disks_io_dict)
cls.timestamp = time()
@classmethod
def _draw(cls):
MemBox._draw_fg()
class NetCollector(Collector):
'''Collects network stats'''
buffer: str = NetBox.buffer
nics: List[str] = []
nic_i: int = 0
nic: str = ""
new_nic: str = ""
nic_error: bool = False
reset: bool = False
graph_raise: Dict[str, int] = {"download" : 5, "upload" : 5}
graph_lower: Dict[str, int] = {"download" : 5, "upload" : 5}
#min_top: int = 10<<10
#* Stats structure = stats[network device][download, upload][total, last, top, graph_top, offset, speed, redraw, graph_raise, graph_low] = int, List[int], bool
stats: Dict[str, Dict[str, Dict[str, Any]]] = {}
#* Strings structure strings[network device][download, upload][total, byte_ps, bit_ps, top, graph_top] = str
strings: Dict[str, Dict[str, Dict[str, str]]] = {}
switched: bool = False
timestamp: float = time()
net_min: Dict[str, int] = {"download" : -1, "upload" : -1}
auto_min: bool = CONFIG.net_auto
net_iface: str = CONFIG.net_iface
sync_top: int = 0
sync_string: str = ""
address: str = ""
@classmethod
def _get_nics(cls):
'''Get a list of all network devices sorted by highest throughput'''
cls.nic_i = 0
cls.nics = []
cls.nic = ""
try:
io_all = psutil.net_io_counters(pernic=True)
except Exception as e:
if not cls.nic_error:
cls.nic_error = True
errlog.exception(f'{e}')
if not io_all: return
up_stat = psutil.net_if_stats()
for nic in sorted(io_all.keys(), key=lambda nic: (getattr(io_all[nic], "bytes_recv", 0) + getattr(io_all[nic], "bytes_sent", 0)), reverse=True):
if nic not in up_stat or not up_stat[nic].isup:
continue
cls.nics.append(nic)
if not cls.nics: cls.nics = [""]
cls.nic = cls.nics[cls.nic_i]
if cls.net_iface and cls.net_iface in cls.nics:
cls.nic = cls.net_iface
cls.nic_i = cls.nics.index(cls.nic)
@classmethod
def switch(cls, key: str):
if cls.net_iface: cls.net_iface = ""
if len(cls.nics) < 2 and cls.nic in cls.nics:
return
if cls.nic_i == -1:
cls.nic_i = 0 if key == "n" else -1
else:
cls.nic_i += +1 if key == "n" else -1
cls.nic_i %= len(cls.nics)
cls.new_nic = cls.nics[cls.nic_i]
cls.switched = True
Collector.collect(NetCollector, redraw=True)
@classmethod
def _collect(cls):
speed: int
stat: Dict
up_stat = psutil.net_if_stats()
if sorted(cls.nics) != sorted(nic for nic in up_stat if up_stat[nic].isup):
old_nic = cls.nic
cls._get_nics()
cls.nic = old_nic
if cls.nic not in cls.nics:
cls.nic_i = -1
else:
cls.nic_i = cls.nics.index(cls.nic)
if cls.switched:
cls.nic = cls.new_nic
cls.switched = False
if not cls.nic or cls.nic not in up_stat:
cls._get_nics()
if not cls.nic: return
try:
io_all = psutil.net_io_counters(pernic=True)[cls.nic]
except KeyError:
return
if not cls.nic in cls.stats:
cls.stats[cls.nic] = {}
cls.strings[cls.nic] = { "download" : {}, "upload" : {}}
for direction, value in ["download", io_all.bytes_recv], ["upload", io_all.bytes_sent]:
cls.stats[cls.nic][direction] = { "total" : value, "last" : value, "top" : 0, "graph_top" : 0, "offset" : 0, "speed" : [], "redraw" : True, "graph_raise" : 0, "graph_lower" : 7 }
for v in ["total", "byte_ps", "bit_ps", "top", "graph_top"]:
cls.strings[cls.nic][direction][v] = ""
cls.stats[cls.nic]["download"]["total"] = io_all.bytes_recv
cls.stats[cls.nic]["upload"]["total"] = io_all.bytes_sent
if cls.nic in psutil.net_if_addrs():
cls.address = getattr(psutil.net_if_addrs()[cls.nic][0], "address", "")
for direction in ["download", "upload"]:
stat = cls.stats[cls.nic][direction]
strings = cls.strings[cls.nic][direction]
#* Calculate current speed
stat["speed"].append(round((stat["total"] - stat["last"]) / (time() - cls.timestamp)))
stat["last"] = stat["total"]
speed = stat["speed"][-1]
if cls.net_min[direction] == -1:
cls.net_min[direction] = units_to_bytes(getattr(CONFIG, "net_" + direction))
stat["graph_top"] = cls.net_min[direction]
stat["graph_lower"] = 7
if not cls.auto_min:
stat["redraw"] = True
strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True)
if stat["offset"] and stat["offset"] > stat["total"]:
cls.reset = True
if cls.reset:
if not stat["offset"]:
stat["offset"] = stat["total"]
else:
stat["offset"] = 0
if direction == "upload":
cls.reset = False
NetBox.redraw = True
if len(stat["speed"]) > NetBox.width * 2:
del stat["speed"][0]
strings["total"] = floating_humanizer(stat["total"] - stat["offset"])
strings["byte_ps"] = floating_humanizer(stat["speed"][-1], per_second=True)
strings["bit_ps"] = floating_humanizer(stat["speed"][-1], bit=True, per_second=True)
if speed > stat["top"] or not stat["top"]:
stat["top"] = speed
strings["top"] = floating_humanizer(stat["top"], bit=True, per_second=True)
if cls.auto_min:
if speed > stat["graph_top"]:
stat["graph_raise"] += 1
if stat["graph_lower"] > 0: stat["graph_lower"] -= 1
elif speed < stat["graph_top"] // 10:
stat["graph_lower"] += 1
if stat["graph_raise"] > 0: stat["graph_raise"] -= 1
if stat["graph_raise"] >= 5 or stat["graph_lower"] >= 5:
if stat["graph_raise"] >= 5:
stat["graph_top"] = round(max(stat["speed"][-5:]) / 0.8)
elif stat["graph_lower"] >= 5:
stat["graph_top"] = max(10 << 10, max(stat["speed"][-5:]) * 3)
stat["graph_raise"] = 0
stat["graph_lower"] = 0
stat["redraw"] = True
strings["graph_top"] = floating_humanizer(stat["graph_top"], short=True)
cls.timestamp = time()
if CONFIG.net_sync:
c_max: int = max(cls.stats[cls.nic]["download"]["graph_top"], cls.stats[cls.nic]["upload"]["graph_top"])
if c_max != cls.sync_top:
cls.sync_top = c_max
cls.sync_string = floating_humanizer(cls.sync_top, short=True)
NetBox.redraw = True
@classmethod
def _draw(cls):
NetBox._draw_fg()
class ProcCollector(Collector):
'''Collects process stats'''
buffer: str = ProcBox.buffer
search_filter: str = ""
case_sensitive: bool = False
processes: Dict = {}
num_procs: int = 0
det_cpu: float = 0.0
detailed: bool = False
detailed_pid: Union[int, None] = None
details: Dict[str, Any] = {}
details_cpu: List[int] = []
details_mem: List[int] = []
expand: int = 0
collapsed: Dict = {}
tree_counter: int = 0
p_values: List[str] = ["pid", "name", "cmdline", "num_threads", "username", "memory_percent", "cpu_percent", "cpu_times", "create_time"]
sort_expr: Dict = {}
sort_expr["pid"] = compile("p.info['pid']", "str", "eval")
sort_expr["program"] = compile("'' if p.info['name'] == 0.0 else p.info['name']", "str", "eval")
sort_expr["arguments"] = compile("' '.join(str(p.info['cmdline'])) or ('' if p.info['name'] == 0.0 else p.info['name'])", "str", "eval")
sort_expr["threads"] = compile("0 if p.info['num_threads'] == 0.0 else p.info['num_threads']", "str", "eval")
sort_expr["user"] = compile("'' if p.info['username'] == 0.0 else p.info['username']", "str", "eval")
sort_expr["memory"] = compile("p.info['memory_percent']", "str", "eval")
sort_expr["cpu lazy"] = compile("(sum(p.info['cpu_times'][:2] if not p.info['cpu_times'] == 0.0 else [0.0, 0.0]) * 1000 / (time() - p.info['create_time']))", "str", "eval")
sort_expr["cpu responsive"] = compile("(p.info['cpu_percent'] if CONFIG.proc_per_core else (p.info['cpu_percent'] / THREADS))", "str", "eval")
@classmethod
def _collect(cls):
'''List all processes with pid, name, arguments, threads, username, memory percent and cpu percent'''
if not "proc" in Box.boxes: return
out: Dict = {}
cls.det_cpu = 0.0
sorting: str = CONFIG.proc_sorting
reverse: bool = not CONFIG.proc_reversed
proc_per_cpu: bool = CONFIG.proc_per_core
search: List[str] = []
if cls.search_filter:
if cls.case_sensitive:
search = [i.strip() for i in cls.search_filter.split(",")]
else:
search = [i.strip() for i in cls.search_filter.lower().split(",")]
err: float = 0.0
n: int = 0
if CONFIG.proc_tree and sorting == "arguments":
sorting = "program"
sort_cmd = cls.sort_expr[sorting]
if CONFIG.proc_tree:
cls._tree(sort_cmd=sort_cmd, reverse=reverse, proc_per_cpu=proc_per_cpu, search=search)
else:
for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
if cls.collect_interrupt or cls.proc_interrupt:
return
if p.info["name"] == "idle" or p.info["name"] == err or p.info["pid"] == err:
continue
if p.info["cmdline"] == err:
p.info["cmdline"] = ""
if p.info["username"] == err:
p.info["username"] = ""
if p.info["num_threads"] == err:
p.info["num_threads"] = 0
if search:
if cls.detailed and p.info["pid"] == cls.detailed_pid:
cls.det_cpu = p.info["cpu_percent"]
for value in [ p.info["name"], " ".join(p.info["cmdline"]), str(p.info["pid"]), p.info["username"] ]:
if not cls.case_sensitive:
value = value.lower()
for s in search:
if s in value:
break
else: continue
break
else: continue
cpu = p.info["cpu_percent"] if proc_per_cpu else round(p.info["cpu_percent"] / THREADS, 2)
mem = p.info["memory_percent"]
if CONFIG.proc_mem_bytes and hasattr(p.info["memory_info"], "rss"):
mem_b = p.info["memory_info"].rss
else:
mem_b = 0
cmd = " ".join(p.info["cmdline"]) or "[" + p.info["name"] + "]"
out[p.info["pid"]] = {
"name" : p.info["name"],
"cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""),
"threads" : p.info["num_threads"],
"username" : p.info["username"],
"mem" : mem,
"mem_b" : mem_b,
"cpu" : cpu }
n += 1
cls.num_procs = n
cls.processes = out.copy()
if cls.detailed:
cls.expand = ((ProcBox.width - 2) - ((ProcBox.width - 2) // 3) - 40) // 10
if cls.expand > 5: cls.expand = 5
if cls.detailed and not cls.details.get("killed", False):
try:
c_pid = cls.detailed_pid
det = psutil.Process(c_pid)
except (psutil.NoSuchProcess, psutil.ZombieProcess):
cls.details["killed"] = True
cls.details["status"] = psutil.STATUS_DEAD
ProcBox.redraw = True
else:
attrs: List[str] = ["status", "memory_info", "create_time"]
if not SYSTEM == "MacOS": attrs.extend(["cpu_num"])
if cls.expand:
attrs.extend(["nice", "terminal"])
if not SYSTEM == "MacOS": attrs.extend(["io_counters"])
if not c_pid in cls.processes: attrs.extend(["pid", "name", "cmdline", "num_threads", "username", "memory_percent"])
cls.details = det.as_dict(attrs=attrs, ad_value="")
if det.parent() != None: cls.details["parent_name"] = det.parent().name()
else: cls.details["parent_name"] = ""
cls.details["pid"] = c_pid
if c_pid in cls.processes:
cls.details["name"] = cls.processes[c_pid]["name"]
cls.details["cmdline"] = cls.processes[c_pid]["cmd"]
cls.details["threads"] = f'{cls.processes[c_pid]["threads"]}'
cls.details["username"] = cls.processes[c_pid]["username"]
cls.details["memory_percent"] = cls.processes[c_pid]["mem"]
cls.details["cpu_percent"] = round(cls.processes[c_pid]["cpu"] * (1 if CONFIG.proc_per_core else THREADS))
else:
cls.details["cmdline"] = " ".join(cls.details["cmdline"]) or "[" + cls.details["name"] + "]"
cls.details["threads"] = f'{cls.details["num_threads"]}'
cls.details["cpu_percent"] = round(cls.det_cpu)
cls.details["killed"] = False
if SYSTEM == "MacOS":
cls.details["cpu_num"] = -1
cls.details["io_counters"] = ""
if hasattr(cls.details["memory_info"], "rss"): cls.details["memory_bytes"] = floating_humanizer(cls.details["memory_info"].rss) # type: ignore
else: cls.details["memory_bytes"] = "? Bytes"
if isinstance(cls.details["create_time"], float):
uptime = timedelta(seconds=round(time()-cls.details["create_time"],0))
if uptime.days > 0: cls.details["uptime"] = f'{uptime.days}d {str(uptime).split(",")[1][:-3].strip()}'
else: cls.details["uptime"] = f'{uptime}'
else: cls.details["uptime"] = "??:??:??"
if cls.expand:
if cls.expand > 1 : cls.details["nice"] = f'{cls.details["nice"]}'
if SYSTEM == "BSD":
if cls.expand > 2:
if hasattr(cls.details["io_counters"], "read_count"): cls.details["io_read"] = f'{cls.details["io_counters"].read_count}'
else: cls.details["io_read"] = "?"
if cls.expand > 3:
if hasattr(cls.details["io_counters"], "write_count"): cls.details["io_write"] = f'{cls.details["io_counters"].write_count}'
else: cls.details["io_write"] = "?"
else:
if cls.expand > 2:
if hasattr(cls.details["io_counters"], "read_bytes"): cls.details["io_read"] = floating_humanizer(cls.details["io_counters"].read_bytes)
else: cls.details["io_read"] = "?"
if cls.expand > 3:
if hasattr(cls.details["io_counters"], "write_bytes"): cls.details["io_write"] = floating_humanizer(cls.details["io_counters"].write_bytes)
else: cls.details["io_write"] = "?"
if cls.expand > 4 : cls.details["terminal"] = f'{cls.details["terminal"]}'.replace("/dev/", "")
cls.details_cpu.append(cls.details["cpu_percent"])
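#* Boost low memory percentages non-linearly so the detailed mem graph stays
#* visible for small processes; the scaled value only feeds the graph history.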
mem = cls.details["memory_percent"]
if mem > 80: mem = round(mem)
elif mem > 60: mem = round(mem * 1.2)
elif mem > 30: mem = round(mem * 1.5)
elif mem > 10: mem = round(mem * 2)
elif mem > 5: mem = round(mem * 10)
else: mem = round(mem * 20)
cls.details_mem.append(mem)
if len(cls.details_cpu) > ProcBox.width: del cls.details_cpu[0]
if len(cls.details_mem) > ProcBox.width: del cls.details_mem[0]
@classmethod
def _tree(cls, sort_cmd, reverse: bool, proc_per_cpu: bool, search: List[str]):
'''List all processes in a tree view with pid, name, threads, username, memory percent and cpu percent'''
out: Dict = {}
err: float = 0.0
det_cpu: float = 0.0
infolist: Dict = {}
cls.tree_counter += 1
tree = defaultdict(list)
n: int = 0
for p in sorted(psutil.process_iter(cls.p_values + (["memory_info"] if CONFIG.proc_mem_bytes else []), err), key=lambda p: eval(sort_cmd), reverse=reverse):
if cls.collect_interrupt: return
try:
tree[p.ppid()].append(p.pid)
except (psutil.NoSuchProcess, psutil.ZombieProcess):
pass
else:
infolist[p.pid] = p.info
n += 1
if 0 in tree and 0 in tree[0]:
tree[0].remove(0)
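#* "tree" maps each parent pid to a list of child pids; create_tree() walks it
#* depth-first, building the box-drawing indents, applying the search filter,
#* and folding collapsed subtrees into the parent's thread/mem/cpu totals.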
def create_tree(pid: int, tree: defaultdict, indent: str = "", inindent: str = " ", found: bool = False, depth: int = 0, collapse_to: Union[None, int] = None):
nonlocal infolist, proc_per_cpu, search, out, det_cpu
name: str; threads: int; username: str; mem: float; cpu: float; collapse: bool = False
cont: bool = True
getinfo: Dict = {}
if cls.collect_interrupt: return
try:
name = psutil.Process(pid).name()
if name == "idle": return
except psutil.Error:
cont = False
name = ""
if pid in infolist:
getinfo = infolist[pid]
if search and not found:
if cls.detailed and pid == cls.detailed_pid:
det_cpu = getinfo["cpu_percent"]
if "username" in getinfo and isinstance(getinfo["username"], float): getinfo["username"] = ""
if "cmdline" in getinfo and isinstance(getinfo["cmdline"], float): getinfo["cmdline"] = ""
for value in [ name, str(pid), getinfo.get("username", ""), " ".join(getinfo.get("cmdline", "")) ]:
if not cls.case_sensitive:
value = value.lower()
for s in search:
if s in value:
found = True
break
else: continue
break
else: cont = False
if cont:
if getinfo:
if getinfo["num_threads"] == err: threads = 0
else: threads = getinfo["num_threads"]
if getinfo["username"] == err: username = ""
else: username = getinfo["username"]
cpu = getinfo["cpu_percent"] if proc_per_cpu else round(getinfo["cpu_percent"] / THREADS, 2)
mem = getinfo["memory_percent"]
if getinfo["cmdline"] == err: cmd = ""
else: cmd = " ".join(getinfo["cmdline"]) or "[" + getinfo["name"] + "]"
if CONFIG.proc_mem_bytes and hasattr(getinfo["memory_info"], "rss"):
mem_b = getinfo["memory_info"].rss
else:
mem_b = 0
else:
threads = mem_b = 0
username = ""
mem = cpu = 0.0
if pid in cls.collapsed:
collapse = cls.collapsed[pid]
else:
collapse = depth > CONFIG.tree_depth
cls.collapsed[pid] = collapse
if collapse_to and not search:
out[collapse_to]["threads"] += threads
out[collapse_to]["mem"] += mem
out[collapse_to]["mem_b"] += mem_b
out[collapse_to]["cpu"] += cpu
else:
if pid in tree and len(tree[pid]) > 0:
sign: str = "+" if collapse else "-"
inindent = inindent.replace(" ├─ ", "[" + sign + "]─").replace(" └─ ", "[" + sign + "]─")
out[pid] = {
"indent" : inindent,
"name": name,
"cmd" : cmd.replace("\n", "").replace("\t", "").replace("\\", ""),
"threads" : threads,
"username" : username,
"mem" : mem,
"mem_b" : mem_b,
"cpu" : cpu,
"depth" : depth,
}
if search: collapse = False
elif collapse and not collapse_to:
collapse_to = pid
if pid not in tree:
return
children = tree[pid][:-1]
for child in children:
create_tree(child, tree, indent + " │ ", indent + " ├─ ", found=found, depth=depth+1, collapse_to=collapse_to)
create_tree(tree[pid][-1], tree, indent + " ", indent + " └─ ", found=found, depth=depth+1, collapse_to=collapse_to)
create_tree(min(tree), tree)
cls.det_cpu = det_cpu
if cls.collect_interrupt: return
if cls.tree_counter >= 100:
cls.tree_counter = 0
for pid in list(cls.collapsed):
if not psutil.pid_exists(pid):
del cls.collapsed[pid]
cls.num_procs = len(out)
cls.processes = out.copy()
@classmethod
def sorting(cls, key: str):
index: int = CONFIG.sorting_options.index(CONFIG.proc_sorting) + (1 if key in ["right", "l"] else -1)
if index >= len(CONFIG.sorting_options): index = 0
elif index < 0: index = len(CONFIG.sorting_options) - 1
CONFIG.proc_sorting = CONFIG.sorting_options[index]
if "left" in Key.mouse: del Key.mouse["left"]
Collector.collect(ProcCollector, interrupt=True, redraw=True)
@classmethod
def _draw(cls):
ProcBox._draw_fg()
class Menu:
'''Holds all menus'''
active: bool = False
close: bool = False
resized: bool = True
menus: Dict[str, Dict[str, str]] = {}
menu_length: Dict[str, int] = {}
background: str = ""
for name, menu in MENUS.items():
menu_length[name] = len(menu["normal"][0])
menus[name] = {}
for sel in ["normal", "selected"]:
menus[name][sel] = ""
for i in range(len(menu[sel])):
menus[name][sel] += Fx.trans(f'{Color.fg(MENU_COLORS[sel][i])}{menu[sel][i]}')
if i < len(menu[sel]) - 1: menus[name][sel] += f'{Mv.d(1)}{Mv.l(len(menu[sel][i]))}'
@classmethod
def main(cls):
if Term.width < 80 or Term.height < 24:
errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
return
out: str = ""
banner: str = ""
redraw: bool = True
key: str = ""
mx: int = 0
my: int = 0
skip: bool = False
mouse_over: bool = False
mouse_items: Dict[str, Dict[str, int]] = {}
cls.active = True
cls.resized = True
menu_names: List[str] = list(cls.menus.keys())
menu_index: int = 0
menu_current: str = menu_names[0]
cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
while not cls.close:
key = ""
if cls.resized:
banner = (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
if UpdateChecker.version != VERSION:
banner += f'{Mv.to(Term.height, 1)}{Fx.b}{THEME.title}New release {UpdateChecker.version} available at https://github.com/aristocratos/bpytop{Fx.ub}{Term.fg}'
cy = 0
for name, menu in cls.menus.items():
ypos = Term.height // 2 - 2 + cy
xpos = Term.width // 2 - (cls.menu_length[name] // 2)
mouse_items[name] = { "x1" : xpos, "x2" : xpos + cls.menu_length[name] - 1, "y1" : ypos, "y2" : ypos + 2 }
cy += 3
redraw = True
cls.resized = False
if redraw:
out = ""
for name, menu in cls.menus.items():
out += f'{Mv.to(mouse_items[name]["y1"], mouse_items[name]["x1"])}{menu["selected" if name == menu_current else "normal"]}'
if skip and redraw:
Draw.now(out)
elif not skip:
Draw.now(f'{cls.background}{banner}{out}')
skip = redraw = False
if Key.input_wait(Timer.left(), mouse=True):
if Key.mouse_moved():
mx, my = Key.get_mouse()
for name, pos in mouse_items.items():
if pos["x1"] <= mx <= pos["x2"] and pos["y1"] <= my <= pos["y2"]:
mouse_over = True
if name != menu_current:
menu_current = name
menu_index = menu_names.index(name)
redraw = True
break
else:
mouse_over = False
else:
key = Key.get()
if key == "mouse_click" and not mouse_over:
key = "M"
if key == "q":
clean_quit()
elif key in ["escape", "M"]:
cls.close = True
break
elif key in ["up", "mouse_scroll_up", "shift_tab"]:
menu_index -= 1
if menu_index < 0: menu_index = len(menu_names) - 1
menu_current = menu_names[menu_index]
redraw = True
elif key in ["down", "mouse_scroll_down", "tab"]:
menu_index += 1
if menu_index > len(menu_names) - 1: menu_index = 0
menu_current = menu_names[menu_index]
redraw = True
elif key == "enter" or (key == "mouse_click" and mouse_over):
if menu_current == "quit":
clean_quit()
elif menu_current == "options":
cls.options()
cls.resized = True
elif menu_current == "help":
cls.help()
cls.resized = True
if Timer.not_zero() and not cls.resized:
skip = True
else:
Collector.collect()
Collector.collect_done.wait(2)
if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
Timer.stamp()
Draw.now(f'{Draw.saved_buffer()}')
cls.background = ""
cls.active = False
cls.close = False
@classmethod
def help(cls):
if Term.width < 80 or Term.height < 24:
errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
return
out: str = ""
out_misc : str = ""
redraw: bool = True
key: str = ""
skip: bool = False
main_active: bool = cls.active
cls.active = True
cls.resized = True
if not cls.background:
cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
help_items: Dict[str, str] = {
"(Mouse 1)" : "Clicks buttons and selects in process list.",
"Selected (Mouse 1)" : "Show detailed information for selected process.",
"(Mouse scroll)" : "Scrolls any scrollable list/text under cursor.",
"(Esc, shift+m)" : "Toggles main menu.",
"(m)" : "Cycle view presets, order: full->proc->stat->user.",
"(1)" : "Toggle CPU box.",
"(2)" : "Toggle MEM box.",
"(3)" : "Toggle NET box.",
"(4)" : "Toggle PROC box.",
"(d)" : "Toggle disks view in MEM box.",
"(F2, o)" : "Shows options.",
"(F1, shift+h)" : "Shows this window.",
"(ctrl+z)" : "Sleep program and put in background.",
"(ctrl+c, q)" : "Quits program.",
"(+) / (-)" : "Add/Subtract 100ms to/from update timer.",
"(Up, k) (Down, j)" : "Select in process list.",
"(Enter)" : "Show detailed information for selected process.",
"(Spacebar)" : "Expand/collapse the selected process in tree view.",
"(Pg Up) (Pg Down)" : "Jump 1 page in process list.",
"(Home) (End)" : "Jump to first or last page in process list.",
"(Left, h) (Right, l)" : "Select previous/next sorting column.",
"(b) (n)" : "Select previous/next network device.",
"(s)" : "Toggle showing swap as a disk.",
"(i)" : "Toggle disks io mode with big graphs.",
"(z)" : "Toggle totals reset for current network device",
"(a)" : "Toggle auto scaling for the network graphs.",
"(y)" : "Toggle synced scaling mode for network graphs.",
"(f)" : "Input a NON case-sensitive process filter.",
"(shift+f)" : "Input a case-sensitive process filter.",
"(c)" : "Toggle per-core cpu usage of processes.",
"(r)" : "Reverse sorting order in processes box.",
"(e)" : "Toggle processes tree view.",
"(delete)" : "Clear any entered filter.",
"Selected (shift+t)" : "Terminate selected process with SIGTERM - 15.",
"Selected (shift+k)" : "Kill selected process with SIGKILL - 9.",
"Selected (shift+i)" : "Interrupt selected process with SIGINT - 2.",
"_1" : " ",
"_2" : "For bug reporting and project updates, visit:",
"_3" : "https://github.com/aristocratos/bpytop",
}
while not cls.close:
key = ""
if cls.resized:
y = 8 if Term.height < len(help_items) + 10 else Term.height // 2 - len(help_items) // 2 + 4
out_misc = (f'{Banner.draw(y-7, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
x = Term.width//2-36
h, w = Term.height-2-y, 72
if len(help_items) > h:
pages = ceil(len(help_items) / h)
else:
h = len(help_items)
pages = 0
page = 1
out_misc += create_box(x, y, w, h+3, "help", line_color=THEME.div_line)
redraw = True
cls.resized = False
if redraw:
out = ""
cy = 0
if pages:
out += (f'{Mv.to(y, x+56)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
for n, (keys, desc) in enumerate(help_items.items()):
if pages and n < (page - 1) * h: continue
out += f'{Mv.to(y+2+cy, x+1)}{Fx.b}{("" if keys.startswith("_") else keys):^20.20}{Fx.ub}{desc:50.50}'
cy += 1
if cy == h: break
if cy < h:
for i in range(h-cy):
out += f'{Mv.to(y+2+cy+i, x+1)}{" " * (w-2)}'
if skip and redraw:
Draw.now(out)
elif not skip:
Draw.now(f'{cls.background}{out_misc}{out}')
skip = redraw = False
if Key.input_wait(Timer.left()):
key = Key.get()
if key == "mouse_click":
mx, my = Key.get_mouse()
if x <= mx < x + w and y <= my < y + h + 3:
if pages and my == y and x + 56 < mx < x + 61:
key = "up"
elif pages and my == y and x + 63 < mx < x + 68:
key = "down"
else:
key = "escape"
if key == "q":
clean_quit()
elif key in ["escape", "M", "enter", "backspace", "H", "f1"]:
cls.close = True
break
elif key in ["up", "mouse_scroll_up", "page_up"] and pages:
page -= 1
if page < 1: page = pages
redraw = True
elif key in ["down", "mouse_scroll_down", "page_down"] and pages:
page += 1
if page > pages: page = 1
redraw = True
if Timer.not_zero() and not cls.resized:
skip = True
else:
Collector.collect()
Collector.collect_done.wait(2)
if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
Timer.stamp()
if main_active:
cls.close = False
return
Draw.now(f'{Draw.saved_buffer()}')
cls.background = ""
cls.active = False
cls.close = False
@classmethod
def options(cls):
if Term.width < 80 or Term.height < 24:
errlog.warning(f'The menu system only works on a terminal size of 80x24 or above!')
return
out: str = ""
out_misc : str = ""
redraw: bool = True
selected_cat: str = ""
selected_int: int = 0
option_items: Dict[str, List[str]] = {}
cat_list: List[str] = []
cat_int: int = 0
change_cat: bool = False
key: str = ""
skip: bool = False
main_active: bool = cls.active
cls.active = True
cls.resized = True
d_quote: str
inputting: bool = False
input_val: str = ""
Theme.refresh()
if not cls.background:
cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
categories: Dict[str, Dict[str, List[str]]] = {
"system" : {
"color_theme" : [
'Set color theme.',
'',
'Choose from all theme files in',
'"/usr/[local/]share/bpytop/themes" and',
'"~/.config/bpytop/themes".',
'',
'"Default" for builtin default theme.',
'User themes are prefixed by a plus sign "+".',
'',
'For theme updates see:',
'https://github.com/aristocratos/bpytop'],
"theme_background" : [
'If the theme set background should be shown.',
'',
'Set to False if you want terminal background',
'transparency.'],
"truecolor" : [
'Sets if 24-bit truecolor should be used.',
'(Requires restart to take effect!)',
'',
'Will convert 24-bit colors to 256 color',
'(6x6x6 color cube) if False.',
'',
'Set to False if your terminal doesn\'t have',
'truecolor support and can\'t convert to',
'256-color.'],
"shown_boxes" : [
'Manually set which boxes to show.',
'',
'Available values are "cpu mem net proc".',
'Separate values with whitespace.',
'',
'Toggle between presets with mode key "m".'],
"update_ms" : [
'Update time in milliseconds.',
'',
'Recommended 2000 ms or above for better sample',
'times for graphs.',
'',
'Min value: 100 ms',
'Max value: 86400000 ms = 24 hours.'],
"draw_clock" : [
'Draw a clock at top of screen.',
'(Only visible if cpu box is enabled!)',
'',
'Formatting according to strftime, empty',
'string to disable.',
'',
'Custom formatting options:',
'"/host" = hostname',
'"/user" = username',
'"/uptime" = system uptime',
'',
'Examples of strftime formats:',
'"%X" = locale HH:MM:SS',
'"%H" = 24h hour, "%I" = 12h hour',
'"%M" = minute, "%S" = second',
'"%d" = day, "%m" = month, "%y" = year'],
"background_update" : [
'Update main ui when menus are showing.',
'',
'True or False.',
'',
'Set this to False if the menus are flickering',
'too much for a comfortable experience.'],
"show_battery" : [
'Show battery stats.',
'(Only visible if cpu box is enabled!)',
'',
'Show battery stats in the top right corner',
'if a battery is present.'],
"show_init" : [
'Show init screen at startup.',
'',
'The init screen is purely cosmetic and',
'slows down start to show status messages.'],
"update_check" : [
'Check for updates at start.',
'',
'Checks for latest version from:',
'https://github.com/aristocratos/bpytop'],
"log_level" : [
'Set loglevel for error.log',
'',
'Levels are: "ERROR" "WARNING" "INFO" "DEBUG".',
'The level set includes all lower levels,',
'i.e. "DEBUG" will show all logging info.']
},
"cpu" : {
"cpu_graph_upper" : [
'Sets the CPU stat shown in upper half of',
'the CPU graph.',
'',
'"total" = Total cpu usage.',
'"user" = User mode cpu usage.',
'"system" = Kernel mode cpu usage.',
'See:',
'https://psutil.readthedocs.io/en/latest/',
'#psutil.cpu_times',
'for attributes available on specific platforms.'],
"cpu_graph_lower" : [
'Sets the CPU stat shown in lower half of',
'the CPU graph.',
'',
'"total" = Total cpu usage.',
'"user" = User mode cpu usage.',
'"system" = Kernel mode cpu usage.',
'See:',
'https://psutil.readthedocs.io/en/latest/',
'#psutil.cpu_times',
'for attributes available on specific platforms.'],
"cpu_invert_lower" : [
'Toggles orientation of the lower CPU graph.',
'',
'True or False.'],
"cpu_single_graph" : [
'Completely disable the lower CPU graph.',
'',
'Shows only upper CPU graph and resizes it',
'to fit to box height.',
'',
'True or False.'],
"check_temp" : [
'Enable cpu temperature reporting.',
'',
'True or False.'],
"cpu_sensor" : [
'Cpu temperature sensor',
'',
'Select the sensor that corresponds to',
'your cpu temperature.',
'Set to "Auto" for auto detection.'],
"show_coretemp" : [
'Show temperatures for cpu cores.',
'',
'Only works if check_temp is True and',
'the system is reporting core temps.'],
"temp_scale" : [
'Which temperature scale to use.',
'',
'Celsius, default scale.',
'',
'Fahrenheit, the american one.',
'',
'Kelvin, 0 = absolute zero, 1 degree change',
'equals 1 degree change in Celsius.',
'',
'Rankine, 0 = absolute zero, 1 degree change',
'equals 1 degree change in Fahrenheit.'],
"custom_cpu_name" : [
'Custom cpu model name in cpu percentage box.',
'',
'Empty string to disable.'],
"show_uptime" : [
'Shows the system uptime in the CPU box.',
'',
'Can also be shown in the clock by using',
'"/uptime" in the formatting.',
'',
'True or False.'],
},
"mem" : {
"mem_graphs" : [
'Show graphs for memory values.',
'',
'True or False.'],
"show_disks" : [
'Split memory box to also show disks.',
'',
'True or False.'],
"show_io_stat" : [
'Toggle small IO stat graphs.',
'',
'Toggles the small IO graphs for the regular',
'disk usage view.',
'',
'True or False.'],
"io_mode" : [
'Toggles io mode for disks.',
'',
'Shows big graphs for disk read/write speeds',
'instead of used/free percentage meters.',
'',
'True or False.'],
"io_graph_combined" : [
'Toggle combined read and write graphs.',
'',
'Only has effect if "io mode" is True.',
'',
'True or False.'],
"io_graph_speeds" : [
'Set top speeds for the io graphs.',
'',
'Manually set which speed in MiB/s that equals',
'100 percent in the io graphs.',
'(10 MiB/s by default).',
'',
'Format: "device:speed" seperate disks with a',
'comma ",".',
'',
'Example: "/dev/sda:100, /dev/sdb:20".'],
"show_swap" : [
'If swap memory should be shown in memory box.',
'',
'True or False.'],
"swap_disk" : [
'Show swap as a disk.',
'',
'Ignores show_swap value above.',
'Inserts itself after first disk.'],
"only_physical" : [
'Filter out non physical disks.',
'',
'Set this to False to include network disks,',
'RAM disks and similar.',
'',
'True or False.'],
"use_fstab" : [
'Read disks list from /etc/fstab.',
'(Has no effect on macOS)',
'',
'This also disables only_physical.',
'',
'True or False.'],
"disks_filter" : [
'Optional filter for shown disks.',
'',
'Should be full path of a mountpoint,',
'"root" replaces "/", separate multiple values',
'with a comma ",".',
'Begin line with "exclude=" to change to exclude',
'filter.',
'Otherwise defaults to an include filter.',
'',
'Example: disks_filter="exclude=/boot, /home/user"'],
},
"net" : {
"net_download" : [
'Fixed network graph download value.',
'',
'Default "10M" = 10 MibiBytes.',
'Possible units:',
'"K" (KiB), "M" (MiB), "G" (GiB).',
'',
'Append "bit" for bits instead of bytes,',
'i.e "100Mbit"',
'',
'Can be toggled with auto button.'],
"net_upload" : [
'Fixed network graph upload value.',
'',
'Default "10M" = 10 MibiBytes.',
'Possible units:',
'"K" (KiB), "M" (MiB), "G" (GiB).',
'',
'Append "bit" for bits instead of bytes,',
'i.e "100Mbit"',
'',
'Can be toggled with auto button.'],
"net_auto" : [
'Start in network graphs auto rescaling mode.',
'',
'Ignores any values set above at start and',
'rescales down to 10 KibiBytes at the lowest.',
'',
'True or False.'],
"net_sync" : [
'Network scale sync.',
'',
'Syncs the scaling for download and upload to',
'whichever currently has the highest scale.',
'',
'True or False.'],
"net_color_fixed" : [
'Set network graphs color gradient to fixed.',
'',
'If True the network graphs color is based',
'on the total bandwidth usage instead of',
'the current autoscaling.',
'',
'The bandwidth usage is based on the',
'"net_download" and "net_upload" values set',
'above.'],
"net_iface" : [
'Network Interface.',
'',
'Manually set the starting Network Interface.',
'Will otherwise automatically choose the NIC',
'with the highest total download since boot.'],
},
"proc" : {
"proc_update_mult" : [
'Processes update multiplier.',
'Sets how often the process list is updated as',
'a multiplier of "update_ms".',
'',
'Set to 2 or higher to greatly decrease bpytop',
'cpu usage. (Only integers)'],
"proc_sorting" : [
'Processes sorting option.',
'',
'Possible values: "pid", "program", "arguments",',
'"threads", "user", "memory", "cpu lazy" and',
'"cpu responsive".',
'',
'"cpu lazy" updates top process over time,',
'"cpu responsive" updates top process directly.'],
"proc_reversed" : [
'Reverse processes sorting order.',
'',
'True or False.'],
"proc_tree" : [
'Processes tree view.',
'',
'Set true to show processes grouped by parents,',
'with lines drawn between parent and child',
'process.'],
"tree_depth" : [
'Process tree auto collapse depth.',
'',
'Sets the depth where the tree view will auto',
'collapse processes at.'],
"proc_colors" : [
'Enable colors in process view.',
'',
'Uses the cpu graph gradient colors.'],
"proc_gradient" : [
'Enable process view gradient fade.',
'',
'Fades from top or current selection.',
'Max fade value is equal to current themes',
'"inactive_fg" color value.'],
"proc_per_core" : [
'Process usage per core.',
'',
'If process cpu usage should be of the core',
'it\'s running on or usage of the total',
'available cpu power.',
'',
'If true and process is multithreaded',
'cpu usage can reach over 100%.'],
"proc_mem_bytes" : [
'Show memory as bytes in process list.',
'',
'True or False.'],
}
}
loglevel_i: int = CONFIG.log_levels.index(CONFIG.log_level)
cpu_sensor_i: int = CONFIG.cpu_sensors.index(CONFIG.cpu_sensor)
cpu_graph_i: Dict[str, int] = { "cpu_graph_upper" : CONFIG.cpu_percent_fields.index(CONFIG.cpu_graph_upper),
"cpu_graph_lower" : CONFIG.cpu_percent_fields.index(CONFIG.cpu_graph_lower)}
temp_scale_i: int = CONFIG.temp_scales.index(CONFIG.temp_scale)
color_i: int
max_opt_len: int = max([len(categories[x]) for x in categories]) * 2
cat_list = list(categories)
while not cls.close:
key = ""
if cls.resized or change_cat:
cls.resized = change_cat = False
selected_cat = list(categories)[cat_int]
option_items = categories[cat_list[cat_int]]
option_len: int = len(option_items) * 2
y = 12 if Term.height < max_opt_len + 13 else Term.height // 2 - max_opt_len // 2 + 7
out_misc = (f'{Banner.draw(y-10, center=True)}{Mv.d(1)}{Mv.l(46)}{Colors.black_bg}{Colors.default}{Fx.b}← esc'
f'{Mv.r(30)}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}')
x = Term.width//2-38
x2 = x + 27
h, w, w2 = min(Term.height-1-y, option_len), 26, 50
h -= h % 2
color_i = list(Theme.themes).index(THEME.current)
out_misc += create_box(x, y - 3, w+w2+1, 3, f'tab{Symbol.right}', line_color=THEME.div_line)
out_misc += create_box(x, y, w, h+2, "options", line_color=THEME.div_line)
redraw = True
cat_width = floor((w+w2) / len(categories))
out_misc += f'{Fx.b}'
for cx, cat in enumerate(categories):
out_misc += f'{Mv.to(y-2, x + 1 + (cat_width * cx) + round(cat_width / 2 - len(cat) / 2 ))}'
if cat == selected_cat:
out_misc += f'{THEME.hi_fg}[{THEME.title}{Fx.u}{cat}{Fx.uu}{THEME.hi_fg}]'
else:
out_misc += f'{THEME.hi_fg}{SUPERSCRIPT[cx+1]}{THEME.title}{cat}'
out_misc += f'{Fx.ub}'
if option_len > h:
pages = ceil(option_len / h)
else:
h = option_len
pages = 0
page = pages if selected_int == -1 and pages > 0 else 1
selected_int = 0 if selected_int >= 0 else len(option_items) - 1
if redraw:
out = ""
cy = 0
selected = list(option_items)[selected_int]
if pages:
out += (f'{Mv.to(y+h+1, x+11)}{THEME.div_line(Symbol.title_left)}{Fx.b}{THEME.title("pg")}{Fx.ub}{THEME.main_fg(Symbol.up)} {Fx.b}{THEME.title}{page}/{pages} '
f'pg{Fx.ub}{THEME.main_fg(Symbol.down)}{THEME.div_line(Symbol.title_right)}')
#out += f'{Mv.to(y+1, x+1)}{THEME.title}{Fx.b}{"Keys:":^20}Description:{THEME.main_fg}'
for n, opt in enumerate(option_items):
if pages and n < (page - 1) * ceil(h / 2): continue
value = getattr(CONFIG, opt)
t_color = f'{THEME.selected_bg}{THEME.selected_fg}' if opt == selected else f'{THEME.title}'
v_color = "" if opt == selected else f'{THEME.title}'
d_quote = '"' if isinstance(value, str) else ""
if opt == "color_theme":
counter = f' {color_i + 1}/{len(Theme.themes)}'
elif opt == "proc_sorting":
counter = f' {CONFIG.sorting_options.index(CONFIG.proc_sorting) + 1}/{len(CONFIG.sorting_options)}'
elif opt == "log_level":
counter = f' {loglevel_i + 1}/{len(CONFIG.log_levels)}'
elif opt == "cpu_sensor":
counter = f' {cpu_sensor_i + 1}/{len(CONFIG.cpu_sensors)}'
elif opt in ["cpu_graph_upper", "cpu_graph_lower"]:
counter = f' {cpu_graph_i[opt] + 1}/{len(CONFIG.cpu_percent_fields)}'
elif opt == "temp_scale":
counter = f' {temp_scale_i + 1}/{len(CONFIG.temp_scales)}'
else:
counter = ""
out += f'{Mv.to(y+1+cy, x+1)}{t_color}{Fx.b}{opt.replace("_", " ").capitalize() + counter:^24.24}{Fx.ub}{Mv.to(y+2+cy, x+1)}{v_color}'
if opt == selected:
if isinstance(value, bool) or opt in ["color_theme", "proc_sorting", "log_level", "cpu_sensor", "cpu_graph_upper", "cpu_graph_lower", "temp_scale"]:
out += f'{t_color} {Symbol.left}{v_color}{d_quote + str(value) + d_quote:^20.20}{t_color}{Symbol.right} '
elif inputting:
out += f'{str(input_val)[-17:] + Fx.bl + "█" + Fx.ubl + "" + Symbol.enter:^33.33}'
else:
out += ((f'{t_color} {Symbol.left}{v_color}' if type(value) is int else " ") +
f'{str(value) + " " + Symbol.enter:^20.20}' + (f'{t_color}{Symbol.right} ' if type(value) is int else " "))
else:
out += f'{d_quote + str(value) + d_quote:^24.24}'
out += f'{Term.bg}'
if opt == selected:
h2 = len(option_items[opt]) + 2
y2 = y + (selected_int * 2) - ((page-1) * h)
if y2 + h2 > Term.height: y2 = Term.height - h2
out += f'{create_box(x2, y2, w2, h2, "description", line_color=THEME.div_line)}{THEME.main_fg}'
for n, desc in enumerate(option_items[opt]):
out += f'{Mv.to(y2+1+n, x2+2)}{desc:.48}'
cy += 2
if cy >= h: break
if cy < h:
for i in range(h-cy):
out += f'{Mv.to(y+1+cy+i, x+1)}{" " * (w-2)}'
if not skip or redraw:
Draw.now(f'{cls.background}{out_misc}{out}')
skip = redraw = False
if Key.input_wait(Timer.left()):
key = Key.get()
redraw = True
has_sel = False
if key == "mouse_click" and not inputting:
mx, my = Key.get_mouse()
if x < mx < x + w + w2 and y - 4 < my < y:
# if my == y - 2:
for cx, cat in enumerate(categories):
ccx = x + (cat_width * cx) + round(cat_width / 2 - len(cat) / 2 )
if ccx - 2 < mx < ccx + 2 + len(cat):
key = str(cx+1)
break
elif x < mx < x + w and y < my < y + h + 2:
mouse_sel = ceil((my - y) / 2) - 1 + ceil((page-1) * (h / 2))
if pages and my == y+h+1 and x+11 < mx < x+16:
key = "page_up"
elif pages and my == y+h+1 and x+19 < mx < x+24:
key = "page_down"
elif my == y+h+1:
pass
elif mouse_sel == selected_int:
if mx < x + 6:
key = "left"
elif mx > x + 19:
key = "right"
else:
key = "enter"
elif mouse_sel < len(option_items):
selected_int = mouse_sel
has_sel = True
else:
key = "escape"
if inputting:
if key in ["escape", "mouse_click"]:
inputting = False
elif key == "enter":
inputting = False
if str(getattr(CONFIG, selected)) != input_val:
if selected == "update_ms":
if not input_val or int(input_val) < 100:
CONFIG.update_ms = 100
elif int(input_val) > 86399900:
CONFIG.update_ms = 86399900
else:
CONFIG.update_ms = int(input_val)
elif selected == "proc_update_mult":
if not input_val or int(input_val) < 1:
CONFIG.proc_update_mult = 1
else:
CONFIG.proc_update_mult = int(input_val)
Collector.proc_counter = 1
elif selected == "tree_depth":
if not input_val or int(input_val) < 0:
CONFIG.tree_depth = 0
else:
CONFIG.tree_depth = int(input_val)
ProcCollector.collapsed = {}
elif selected == "shown_boxes":
new_boxes: List = []
for box in input_val.split():
if box in ["cpu", "mem", "net", "proc"]:
new_boxes.append(box)
CONFIG.shown_boxes = " ".join(new_boxes)
Box.view_mode = "user"
Box.view_modes["user"] = CONFIG.shown_boxes.split()
Draw.clear(saved=True)
elif isinstance(getattr(CONFIG, selected), str):
setattr(CONFIG, selected, input_val)
if selected.startswith("net_"):
NetCollector.net_min = {"download" : -1, "upload" : -1}
elif selected == "draw_clock":
Box.clock_on = len(CONFIG.draw_clock) > 0
if not Box.clock_on: Draw.clear("clock", saved=True)
elif selected == "io_graph_speeds":
MemBox.graph_speeds = {}
Term.refresh(force=True)
cls.resized = False
elif key == "backspace" and len(input_val):
input_val = input_val[:-1]
elif key == "delete":
input_val = ""
elif isinstance(getattr(CONFIG, selected), str) and len(key) == 1:
input_val += key
elif isinstance(getattr(CONFIG, selected), int) and key.isdigit():
input_val += key
elif key == "q":
clean_quit()
elif key in ["escape", "o", "M", "f2"]:
cls.close = True
break
elif key == "tab" or (key == "down" and selected_int == len(option_items) - 1 and (page == pages or pages == 0)):
if cat_int == len(categories) - 1:
cat_int = 0
else:
cat_int += 1
change_cat = True
elif key == "shift_tab" or (key == "up" and selected_int == 0 and page == 1):
if cat_int == 0:
cat_int = len(categories) - 1
else:
cat_int -= 1
change_cat = True
selected_int = -1 if key != "shift_tab" else 0
elif key in list(map(str, range(1, len(cat_list)+1))) and key != str(cat_int + 1):
cat_int = int(key) - 1
change_cat = True
elif key == "enter" and selected in ["update_ms", "disks_filter", "custom_cpu_name", "net_download",
"net_upload", "draw_clock", "tree_depth", "proc_update_mult", "shown_boxes", "net_iface", "io_graph_speeds"]:
inputting = True
input_val = str(getattr(CONFIG, selected))
elif key == "left" and selected == "update_ms" and CONFIG.update_ms - 100 >= 100:
CONFIG.update_ms -= 100
Box.draw_update_ms()
elif key == "right" and selected == "update_ms" and CONFIG.update_ms + 100 <= 86399900:
CONFIG.update_ms += 100
Box.draw_update_ms()
elif key == "left" and selected == "proc_update_mult" and CONFIG.proc_update_mult > 1:
CONFIG.proc_update_mult -= 1
Collector.proc_counter = 1
elif key == "right" and selected == "proc_update_mult":
CONFIG.proc_update_mult += 1
Collector.proc_counter = 1
elif key == "left" and selected == "tree_depth" and CONFIG.tree_depth > 0:
CONFIG.tree_depth -= 1
ProcCollector.collapsed = {}
elif key == "right" and selected == "tree_depth":
CONFIG.tree_depth += 1
ProcCollector.collapsed = {}
elif key in ["left", "right"] and isinstance(getattr(CONFIG, selected), bool):
setattr(CONFIG, selected, not getattr(CONFIG, selected))
if selected == "check_temp":
if CONFIG.check_temp:
CpuCollector.get_sensors()
else:
CpuCollector.sensor_method = ""
CpuCollector.got_sensors = False
if selected in ["net_auto", "net_color_fixed", "net_sync"]:
if selected == "net_auto": NetCollector.auto_min = CONFIG.net_auto
NetBox.redraw = True
if selected == "theme_background":
Term.bg = f'{THEME.main_bg}' if CONFIG.theme_background else "\033[49m"
Draw.now(Term.bg)
if selected == "show_battery":
Draw.clear("battery", saved=True)
Term.refresh(force=True)
cls.resized = False
elif key in ["left", "right"] and selected == "color_theme" and len(Theme.themes) > 1:
if key == "left":
color_i -= 1
if color_i < 0: color_i = len(Theme.themes) - 1
elif key == "right":
color_i += 1
if color_i > len(Theme.themes) - 1: color_i = 0
Collector.collect_idle.wait()
CONFIG.color_theme = list(Theme.themes)[color_i]
THEME(CONFIG.color_theme)
Term.refresh(force=True)
Timer.finish()
elif key in ["left", "right"] and selected == "proc_sorting":
ProcCollector.sorting(key)
elif key in ["left", "right"] and selected == "log_level":
if key == "left":
loglevel_i -= 1
if loglevel_i < 0: loglevel_i = len(CONFIG.log_levels) - 1
elif key == "right":
loglevel_i += 1
if loglevel_i > len(CONFIG.log_levels) - 1: loglevel_i = 0
CONFIG.log_level = CONFIG.log_levels[loglevel_i]
errlog.setLevel(getattr(logging, CONFIG.log_level))
errlog.info(f'Loglevel set to {CONFIG.log_level}')
elif key in ["left", "right"] and selected in ["cpu_graph_upper", "cpu_graph_lower"]:
if key == "left":
cpu_graph_i[selected] -= 1
if cpu_graph_i[selected] < 0: cpu_graph_i[selected] = len(CONFIG.cpu_percent_fields) - 1
if key == "right":
cpu_graph_i[selected] += 1
if cpu_graph_i[selected] > len(CONFIG.cpu_percent_fields) - 1: cpu_graph_i[selected] = 0
setattr(CONFIG, selected, CONFIG.cpu_percent_fields[cpu_graph_i[selected]])
setattr(CpuCollector, selected.replace("_graph", ""), [])
Term.refresh(force=True)
cls.resized = False
elif key in ["left", "right"] and selected == "temp_scale":
if key == "left":
temp_scale_i -= 1
if temp_scale_i < 0: temp_scale_i = len(CONFIG.temp_scales) - 1
if key == "right":
temp_scale_i += 1
if temp_scale_i > len(CONFIG.temp_scales) - 1: temp_scale_i = 0
CONFIG.temp_scale = CONFIG.temp_scales[temp_scale_i]
Term.refresh(force=True)
cls.resized = False
elif key in ["left", "right"] and selected == "cpu_sensor" and len(CONFIG.cpu_sensors) > 1:
if key == "left":
cpu_sensor_i -= 1
if cpu_sensor_i < 0: cpu_sensor_i = len(CONFIG.cpu_sensors) - 1
elif key == "right":
cpu_sensor_i += 1
if cpu_sensor_i > len(CONFIG.cpu_sensors) - 1: cpu_sensor_i = 0
Collector.collect_idle.wait()
CpuCollector.sensor_swap = True
CONFIG.cpu_sensor = CONFIG.cpu_sensors[cpu_sensor_i]
if CONFIG.check_temp and (CpuCollector.sensor_method != "psutil" or CONFIG.cpu_sensor == "Auto"):
CpuCollector.get_sensors()
Term.refresh(force=True)
cls.resized = False
elif key in ["up", "mouse_scroll_up"]:
selected_int -= 1
if selected_int < 0: selected_int = len(option_items) - 1
page = floor(selected_int * 2 / h) + 1
elif key in ["down", "mouse_scroll_down"]:
selected_int += 1
if selected_int > len(option_items) - 1: selected_int = 0
page = floor(selected_int * 2 / h) + 1
elif key == "page_up":
if not pages or page == 1:
selected_int = 0
else:
page -= 1
if page < 1: page = pages
selected_int = (page-1) * ceil(h / 2)
elif key == "page_down":
if not pages or page == pages:
selected_int = len(option_items) - 1
else:
page += 1
if page > pages: page = 1
selected_int = (page-1) * ceil(h / 2)
elif has_sel:
pass
else:
redraw = False
if Timer.not_zero() and not cls.resized:
skip = True
else:
Collector.collect()
Collector.collect_done.wait(2)
if CONFIG.background_update: cls.background = f'{THEME.inactive_fg}' + Fx.uncolor(f'{Draw.saved_buffer()}') + f'{Term.fg}'
Timer.stamp()
if main_active:
cls.close = False
return
Draw.now(f'{Draw.saved_buffer()}')
cls.background = ""
cls.active = False
cls.close = False
class Timer:
timestamp: float
return_zero = False
@classmethod
def stamp(cls):
cls.timestamp = time()
@classmethod
def not_zero(cls) -> bool:
if cls.return_zero:
cls.return_zero = False
return False
return cls.timestamp + (CONFIG.update_ms / 1000) > time()
@classmethod
def left(cls) -> float:
return cls.timestamp + (CONFIG.update_ms / 1000) - time()
@classmethod
def finish(cls):
cls.return_zero = True
cls.timestamp = time() - (CONFIG.update_ms / 1000)
Key.break_wait()
class UpdateChecker:
version: str = VERSION
thread: threading.Thread
@classmethod
def run(cls):
cls.thread = threading.Thread(target=cls._checker)
cls.thread.start()
@classmethod
def _checker(cls):
try:
with urllib.request.urlopen("https://github.com/aristocratos/bpytop/raw/master/bpytop.py", timeout=5) as source: # type: ignore
for line in source:
line = line.decode("utf-8")
if line.startswith("VERSION: str ="):
cls.version = line[(line.index("=")+1):].strip('" \n')
break
except Exception as e:
errlog.exception(f'{e}')
else:
if cls.version != VERSION and which("notify-send"):
try:
subprocess.run(["notify-send", "-u", "normal", "BpyTop Update!",
f'New version of BpyTop available!\nCurrent version: {VERSION}\nNew version: {cls.version}\nDownload at github.com/aristocratos/bpytop',
"-i", "update-notifier", "-t", "10000"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
except Exception as e:
errlog.exception(f'{e}')
class Init:
running: bool = True
initbg_colors: List[str] = []
initbg_data: List[int]
initbg_up: Graph
initbg_down: Graph
resized = False
@classmethod
def start(cls):
Draw.buffer("init", z=1)
Draw.buffer("initbg", z=10)
for i in range(51):
for _ in range(2): cls.initbg_colors.append(Color.fg(i, i, i))
Draw.buffer("banner", (f'{Banner.draw(Term.height // 2 - 10, center=True)}{Mv.d(1)}{Mv.l(11)}{Colors.black_bg}{Colors.default}'
f'{Fx.b}{Fx.i}Version: {VERSION}{Fx.ui}{Fx.ub}{Term.bg}{Term.fg}{Color.fg("#50")}'), z=2)
for _i in range(7):
perc = f'{str(round((_i + 1) * 14 + 2)) + "%":>5}'
Draw.buffer("+banner", f'{Mv.to(Term.height // 2 - 2 + _i, Term.width // 2 - 28)}{Fx.trans(perc)}{Symbol.v_line}')
Draw.out("banner")
Draw.buffer("+init!", f'{Color.fg("#cc")}{Fx.b}{Mv.to(Term.height // 2 - 2, Term.width // 2 - 21)}{Mv.save}')
cls.initbg_data = [randint(0, 100) for _ in range(Term.width * 2)]
cls.initbg_up = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=True)
cls.initbg_down = Graph(Term.width, Term.height // 2, cls.initbg_colors, cls.initbg_data, invert=False)
@classmethod
def success(cls):
if not CONFIG.show_init or cls.resized: return
cls.draw_bg(5)
Draw.buffer("+init!", f'{Mv.restore}{Symbol.ok}\n{Mv.r(Term.width // 2 - 22)}{Mv.save}')
@staticmethod
def fail(err):
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Symbol.fail}')
sleep(2)
errlog.exception(f'{err}')
clean_quit(1, errmsg=f'Error during init! See {CONFIG_DIR}/error.log for more information.')
@classmethod
def draw_bg(cls, times: int = 5):
for _ in range(times):
sleep(0.05)
x = randint(0, 100)
Draw.buffer("initbg", f'{Fx.ub}{Mv.to(0, 0)}{cls.initbg_up(x)}{Mv.to(Term.height // 2, 0)}{cls.initbg_down(x)}')
Draw.out("initbg", "banner", "init")
@classmethod
def done(cls):
cls.running = False
if not CONFIG.show_init: return
if cls.resized:
Draw.now(Term.clear)
else:
cls.draw_bg(10)
Draw.clear("initbg", "banner", "init", saved=True)
if cls.resized: return
del cls.initbg_up, cls.initbg_down, cls.initbg_data, cls.initbg_colors
#? Functions ------------------------------------------------------------------------------------->
def get_cpu_name() -> str:
'''Fetch a suitable CPU identifier from the CPU model name string'''
name: str = ""
nlist: List = []
command: str = ""
cmd_out: str = ""
rem_line: str = ""
if SYSTEM == "Linux":
command = "cat /proc/cpuinfo"
rem_line = "model name"
elif SYSTEM == "MacOS":
command ="sysctl -n machdep.cpu.brand_string"
elif SYSTEM == "BSD":
command ="sysctl hw.model"
rem_line = "hw.model"
try:
cmd_out = subprocess.check_output("LANG=C " + command, shell=True, universal_newlines=True)
except:
pass
if rem_line:
for line in cmd_out.split("\n"):
if rem_line in line:
name = re.sub( ".*" + rem_line + ".*:", "", line,1).lstrip()
else:
name = cmd_out
nlist = name.split(" ")
try:
if "Xeon" in name and "CPU" in name:
name = nlist[nlist.index("CPU")+(-1 if name.endswith(("CPU", "z")) else 1)]
elif "Ryzen" in name:
name = " ".join(nlist[nlist.index("Ryzen"):nlist.index("Ryzen")+3])
elif "Duo" in name and "@" in name:
name = " ".join(nlist[:nlist.index("@")])
elif "CPU" in name and not nlist[0] == "CPU" and not nlist[nlist.index("CPU")-1].isdigit():
name = nlist[nlist.index("CPU")-1]
except:
pass
name = name.replace("Processor", "").replace("CPU", "").replace("(R)", "").replace("(TM)", "").replace("Intel", "")
name = re.sub(r"\d?\.?\d+[mMgG][hH][zZ]", "", name)
name = " ".join(name.split())
return name
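# Illustrative trace (hypothetical input, not from the source): for the
# common model string "Intel(R) Core(TM) i7-8700K CPU @ 3.70GHz" the
# 'nlist[nlist.index("CPU")-1]' branch is taken, and after the
# replace/regex cleanup passes the function returns "i7-8700K".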
def get_cpu_core_mapping() -> List[int]:
mapping: List[int] = []
core_ids: List[int] = []
if SYSTEM == "Linux" and os.path.isfile("/proc/cpuinfo"):
try:
mapping = [0] * THREADS
num = 0
with open("/proc/cpuinfo", "r") as f:
for line in f:
if line.startswith("processor"):
num = int(line.strip()[(line.index(": ")+2):])
if num > THREADS - 1:
break
elif line.startswith("core id"):
core_id = int(line.strip()[(line.index(": ")+2):])
if core_id not in core_ids:
core_ids.append(core_id)
mapping[num] = core_ids.index(core_id)
if num < THREADS - 1:
raise Exception
except:
mapping = []
if not mapping:
mapping = []
for _ in range(THREADS // CORES):
mapping.extend([x for x in range(CORES)])
return mapping
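# Example (hypothetical): on a 4-core/8-thread CPU the /proc/cpuinfo scan
# typically yields mapping = [0, 1, 2, 3, 0, 1, 2, 3]; the fallback branch
# builds the same pattern from THREADS // CORES repetitions of range(CORES).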
def create_box(x: int = 0, y: int = 0, width: int = 0, height: int = 0, title: str = "", title2: str = "", line_color: Color = None, title_color: Color = None, fill: bool = True, box = None) -> str:
'''Create a box from a box object or by given arguments'''
out: str = f'{Term.fg}{Term.bg}'
num: int = 0
if not line_color: line_color = THEME.div_line
if not title_color: title_color = THEME.title
#* Get values from box class if given
if box:
x = box.x
y = box.y
width = box.width
height = box.height
title = box.name
num = box.num
hlines: Tuple[int, int] = (y, y + height - 1)
out += f'{line_color}'
#* Draw all horizontal lines
for hpos in hlines:
out += f'{Mv.to(hpos, x)}{Symbol.h_line * (width - 1)}'
#* Draw all vertical lines and fill if enabled
for hpos in range(hlines[0]+1, hlines[1]):
out += f'{Mv.to(hpos, x)}{Symbol.v_line}{" " * (width-2) if fill else Mv.r(width-2)}{Symbol.v_line}'
#* Draw corners
out += f'{Mv.to(y, x)}{Symbol.left_up}\
{Mv.to(y, x + width - 1)}{Symbol.right_up}\
{Mv.to(y + height - 1, x)}{Symbol.left_down}\
{Mv.to(y + height - 1, x + width - 1)}{Symbol.right_down}'
#* Draw titles if enabled
if title:
numbered: str = "" if not num else f'{THEME.hi_fg(SUPERSCRIPT[num])}'
out += f'{Mv.to(y, x + 2)}{Symbol.title_left}{Fx.b}{numbered}{title_color}{title}{Fx.ub}{line_color}{Symbol.title_right}'
if title2:
out += f'{Mv.to(hlines[1], x + 2)}{Symbol.title_left}{title_color}{Fx.b}{title2}{Fx.ub}{line_color}{Symbol.title_right}'
return f'{out}{Term.fg}{Mv.to(y + 1, x + 1)}'
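# Usage sketch (hypothetical values): create_box(x=2, y=1, width=20, height=5,
# title="cpu") returns an ANSI escape string that draws a 20x5 frame with
# "cpu" embedded in the top border and leaves the cursor just inside the box.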
def now_sleeping(signum, frame):
"""Reset terminal settings and stop background input read before putting to sleep"""
Key.stop()
Collector.stop()
Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title())
Term.echo(True)
os.kill(os.getpid(), signal.SIGSTOP)
def now_awake(signum, frame):
"""Set terminal settings and restart background input read"""
Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP"))
Term.echo(False)
Key.start()
Term.refresh()
Box.calc_sizes()
Box.draw_bg()
Collector.start()
def quit_sigint(signum, frame):
"""SIGINT redirection to clean_quit()"""
clean_quit()
def clean_quit(errcode: int = 0, errmsg: str = "", thread: bool = False):
"""Stop background input read, save current config and reset terminal settings before quitting"""
global THREAD_ERROR
if thread:
THREAD_ERROR = errcode
interrupt_main()
return
if THREAD_ERROR: errcode = THREAD_ERROR
Key.stop()
Collector.stop()
if not errcode: CONFIG.save_config()
Draw.now(Term.clear, Term.normal_screen, Term.show_cursor, Term.mouse_off, Term.mouse_direct_off, Term.title())
Term.echo(True)
if errcode == 0:
errlog.info(f'Exiting. Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
else:
errlog.warning(f'Exiting with errorcode ({errcode}). Runtime {timedelta(seconds=round(time() - SELF_START, 0))} \n')
if not errmsg: errmsg = f'Bpytop exited with errorcode ({errcode}). See {CONFIG_DIR}/error.log for more information!'
if errmsg: print(errmsg)
raise SystemExit(errcode)
def floating_humanizer(value: Union[float, int], bit: bool = False, per_second: bool = False, start: int = 0, short: bool = False) -> str:
'''Scales up in steps of 1024 to highest possible unit and returns string with unit suffixed
* bit=True or defaults to bytes
* start=int to set 1024 multiplier starting unit
* short=True always returns 0 decimals and shortens unit to 1 character
'''
out: str = ""
mult: int = 8 if bit else 1
selector: int = start
unit: Tuple[str, ...] = UNITS["bit"] if bit else UNITS["byte"]
if isinstance(value, float): value = round(value * 100 * mult)
elif value > 0: value *= 100 * mult
else: value = 0
while len(f'{value}') > 5 and value >= 102400:
value >>= 10
if value < 100:
out = f'{value}'
break
selector += 1
else:
if len(f'{value}') == 4 and selector > 0:
out = f'{value}'[:-2] + "." + f'{value}'[-2]
elif len(f'{value}') == 3 and selector > 0:
out = f'{value}'[:-2] + "." + f'{value}'[-2:]
elif len(f'{value}') >= 2:
out = f'{value}'[:-2]
else:
out = f'{value}'
if short:
if "." in out:
out = f'{round(float(out))}'
if len(out) > 3:
out = f'{int(out[0]) + 1}'
selector += 1
out += f'{"" if short else " "}{unit[selector][0] if short else unit[selector]}'
if per_second: out += "ps" if bit else "/s"
return out
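# Scaling examples (hypothetical REPL output, assuming the module-level UNITS
# table maps bytes to ("Byte", "KiB", "MiB", ...) and bits to ("bit", "Kib", ...)):
# floating_humanizer(1024) -> '1.00 KiB'
# floating_humanizer(1024, bit=True) -> '8.00 Kib' (value multiplied by 8)
# floating_humanizer(1024, short=True) -> '1K' (0 decimals, 1-char unit)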
def units_to_bytes(value: str) -> int:
if not value: return 0
out: int = 0
mult: int = 0
bit: bool = False
value_i: int = 0
units: Dict[str, int] = {"k" : 1, "m" : 2, "g" : 3}
try:
if value.lower().endswith("s"):
value = value[:-1]
if value.lower().endswith("bit"):
bit = True
value = value[:-3]
elif value.lower().endswith("byte"):
value = value[:-4]
if value[-1].lower() in units:
mult = units[value[-1].lower()]
value = value[:-1]
if "." in value and value.replace(".", "").isdigit():
if mult > 0:
value_i = round(float(value) * 1024)
mult -= 1
else:
value_i = round(float(value))
elif value.isdigit():
value_i = int(value)
out = int(value_i) << (10 * mult)
if bit: out = round(out / 8)
except ValueError:
out = 0
return out
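# Parsing examples (hypothetical, based on the branches above):
# units_to_bytes("10M") -> 10485760 (10 << 20)
# units_to_bytes("100Mbit") -> 13107200 (100 << 20, then divided by 8)
# units_to_bytes("1.5K") -> 1536 (fractions are scaled by 1024 first)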
def min_max(value: int, min_value: int=0, max_value: int=100) -> int:
return max(min_value, min(value, max_value))
def readfile(file: str, default: str = "") -> str:
out: Union[str, None] = None
if os.path.isfile(file):
try:
with open(file, "r") as f:
out = f.read().strip()
except:
pass
return default if out is None else out
def temperature(value: int, scale: str = "celsius") -> Tuple[int, str]:
"""Returns a tuple with integer value and string unit converted from an integer in celsius to: celsius, fahrenheit, kelvin or rankine."""
if scale == "celsius":
return (value, "°C")
elif scale == "fahrenheit":
return (round(value * 1.8 + 32), "°F")
elif scale == "kelvin":
return (round(value + 273.15), "°K")
elif scale == "rankine":
return (round(value * 1.8 + 491.67), "°R")
else:
return (0, "")
def process_keys():
mouse_pos: Tuple[int, int] = (0, 0)
filtered: bool = False
box_keys = {"1" : "cpu", "2" : "mem", "3" : "net", "4" : "proc"}
while Key.has_key():
key = Key.get()
found: bool = True
if key in ["mouse_scroll_up", "mouse_scroll_down", "mouse_click"]:
mouse_pos = Key.get_mouse()
if mouse_pos[0] >= ProcBox.x and ProcBox.current_y + 1 <= mouse_pos[1] < ProcBox.current_y + ProcBox.current_h - 1:
pass
elif key == "mouse_click":
key = "mouse_unselect"
else:
key = "_null"
if ProcBox.filtering:
if key in ["enter", "mouse_click", "mouse_unselect"]:
ProcBox.filtering = False
Collector.collect(ProcCollector, redraw=True, only_draw=True)
continue
elif key in ["escape", "delete"]:
ProcCollector.search_filter = ""
ProcBox.filtering = False
elif len(key) == 1:
ProcCollector.search_filter += key
elif key == "backspace" and len(ProcCollector.search_filter) > 0:
ProcCollector.search_filter = ProcCollector.search_filter[:-1]
else:
continue
Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
if filtered: Collector.collect_done.wait(0.1)
filtered = True
continue
if key == "_null":
continue
elif key == "q":
clean_quit()
elif key == "+" and CONFIG.update_ms + 100 <= 86399900:
CONFIG.update_ms += 100
Box.draw_update_ms()
elif key == "-" and CONFIG.update_ms - 100 >= 100:
CONFIG.update_ms -= 100
Box.draw_update_ms()
elif key in ["M", "escape"]:
Menu.main()
elif key in ["o", "f2"]:
Menu.options()
elif key in ["H", "f1"]:
Menu.help()
elif key == "m":
if list(Box.view_modes).index(Box.view_mode) + 1 > len(list(Box.view_modes)) - 1:
Box.view_mode = list(Box.view_modes)[0]
else:
Box.view_mode = list(Box.view_modes)[(list(Box.view_modes).index(Box.view_mode) + 1)]
CONFIG.shown_boxes = " ".join(Box.view_modes[Box.view_mode])
Draw.clear(saved=True)
Term.refresh(force=True)
elif key in box_keys:
boxes = CONFIG.shown_boxes.split()
if box_keys[key] in boxes:
boxes.remove(box_keys[key])
else:
boxes.append(box_keys[key])
CONFIG.shown_boxes = " ".join(boxes)
Box.view_mode = "user"
Box.view_modes["user"] = CONFIG.shown_boxes.split()
Draw.clear(saved=True)
Term.refresh(force=True)
else:
found = False
if found: continue
if "proc" in Box.boxes:
if key in ["left", "right", "h", "l"]:
ProcCollector.sorting(key)
elif key == " " and CONFIG.proc_tree and ProcBox.selected > 0:
if ProcBox.selected_pid in ProcCollector.collapsed:
ProcCollector.collapsed[ProcBox.selected_pid] = not ProcCollector.collapsed[ProcBox.selected_pid]
Collector.collect(ProcCollector, interrupt=True, redraw=True)
elif key == "e":
CONFIG.proc_tree = not CONFIG.proc_tree
Collector.collect(ProcCollector, interrupt=True, redraw=True)
elif key == "r":
CONFIG.proc_reversed = not CONFIG.proc_reversed
Collector.collect(ProcCollector, interrupt=True, redraw=True)
elif key == "c":
CONFIG.proc_per_core = not CONFIG.proc_per_core
Collector.collect(ProcCollector, interrupt=True, redraw=True)
elif key in ["f", "F"]:
ProcBox.filtering = True
ProcCollector.case_sensitive = key == "F"
if not ProcCollector.search_filter: ProcBox.start = 0
Collector.collect(ProcCollector, redraw=True, only_draw=True)
elif key in ["T", "K", "I"] and (ProcBox.selected > 0 or ProcCollector.detailed):
pid: int = ProcBox.selected_pid if ProcBox.selected > 0 else ProcCollector.detailed_pid # type: ignore
if psutil.pid_exists(pid):
if key == "T": sig = signal.SIGTERM
elif key == "K": sig = signal.SIGKILL
elif key == "I": sig = signal.SIGINT
try:
os.kill(pid, sig)
except Exception as e:
errlog.error(f'Exception when sending signal {sig} to pid {pid}')
errlog.exception(f'{e}')
elif key == "delete" and ProcCollector.search_filter:
ProcCollector.search_filter = ""
Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
elif key == "enter":
if ProcBox.selected > 0 and ProcCollector.detailed_pid != ProcBox.selected_pid and psutil.pid_exists(ProcBox.selected_pid):
ProcCollector.detailed = True
ProcBox.last_selection = ProcBox.selected
ProcBox.selected = 0
ProcCollector.detailed_pid = ProcBox.selected_pid
ProcBox.resized = True
Collector.proc_counter = 1
elif ProcCollector.detailed:
ProcBox.selected = ProcBox.last_selection
ProcBox.last_selection = 0
ProcCollector.detailed = False
ProcCollector.detailed_pid = None
ProcBox.resized = True
Collector.proc_counter = 1
else:
continue
ProcCollector.details = {}
ProcCollector.details_cpu = []
ProcCollector.details_mem = []
Graphs.detailed_cpu = NotImplemented
Graphs.detailed_mem = NotImplemented
Collector.collect(ProcCollector, proc_interrupt=True, redraw=True)
elif key in ["up", "down", "mouse_scroll_up", "mouse_scroll_down", "page_up", "page_down", "home", "end", "mouse_click", "mouse_unselect", "j", "k"]:
ProcBox.selector(key, mouse_pos)
if "net" in Box.boxes:
if key in ["b", "n"]:
NetCollector.switch(key)
elif key == "z":
NetCollector.reset = not NetCollector.reset
Collector.collect(NetCollector, redraw=True)
elif key == "y":
CONFIG.net_sync = not CONFIG.net_sync
Collector.collect(NetCollector, redraw=True)
elif key == "a":
NetCollector.auto_min = not NetCollector.auto_min
NetCollector.net_min = {"download" : -1, "upload" : -1}
Collector.collect(NetCollector, redraw=True)
if "mem" in Box.boxes:
if key == "g":
CONFIG.mem_graphs = not CONFIG.mem_graphs
Collector.collect(MemCollector, interrupt=True, redraw=True)
elif key == "s":
Collector.collect_idle.wait()
CONFIG.swap_disk = not CONFIG.swap_disk
Collector.collect(MemCollector, interrupt=True, redraw=True)
elif key == "d":
Collector.collect_idle.wait()
CONFIG.show_disks = not CONFIG.show_disks
Collector.collect(MemCollector, interrupt=True, redraw=True)
elif key == "i":
Collector.collect_idle.wait()
CONFIG.io_mode = not CONFIG.io_mode
Collector.collect(MemCollector, interrupt=True, redraw=True)
#? Pre main -------------------------------------------------------------------------------------->
CPU_NAME: str = get_cpu_name()
CORE_MAP: List[int] = get_cpu_core_mapping()
THEME: Theme
def main():
global THEME
Term.width = os.get_terminal_size().columns
Term.height = os.get_terminal_size().lines
#? Init -------------------------------------------------------------------------------------->
if DEBUG: TimeIt.start("Init")
#? Switch to alternate screen, clear screen, hide cursor, enable mouse reporting and disable input echo
Draw.now(Term.alt_screen, Term.clear, Term.hide_cursor, Term.mouse_on, Term.title("BpyTOP"))
Term.echo(False)
#Term.refresh(force=True)
#? Start a thread checking for updates while running init
if CONFIG.update_check: UpdateChecker.run()
#? Draw banner and init status
if CONFIG.show_init and not Init.resized:
Init.start()
#? Load theme
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Loading theme and creating colors... ")}{Mv.save}')
try:
THEME = Theme(CONFIG.color_theme)
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Setup boxes
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Doing some maths and drawing... ")}{Mv.save}')
try:
if CONFIG.check_temp: CpuCollector.get_sensors()
Box.calc_sizes()
Box.draw_bg(now=False)
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Setup signal handlers for SIGSTP, SIGCONT, SIGINT and SIGWINCH
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Setting up signal handlers... ")}{Mv.save}')
try:
signal.signal(signal.SIGTSTP, now_sleeping) #* Ctrl-Z
signal.signal(signal.SIGCONT, now_awake) #* Resume
signal.signal(signal.SIGINT, quit_sigint) #* Ctrl-C
signal.signal(signal.SIGWINCH, Term.refresh) #* Terminal resized
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Start a separate thread for reading keyboard input
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting input reader thread... ")}{Mv.save}')
try:
if isinstance(sys.stdin, io.TextIOWrapper) and sys.version_info >= (3, 7):
sys.stdin.reconfigure(errors="ignore") # type: ignore
Key.start()
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Start a separate thread for data collection and drawing
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Starting data collection and drawer thread... ")}{Mv.save}')
try:
Collector.start()
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Collect data and draw to buffer
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Collecting data and drawing... ")}{Mv.save}')
try:
Collector.collect(draw_now=False)
except Exception as e:
Init.fail(e)
else:
Init.success()
#? Draw to screen
if CONFIG.show_init:
Draw.buffer("+init!", f'{Mv.restore}{Fx.trans("Finishing up... ")}{Mv.save}')
try:
Collector.collect_done.wait()
except Exception as e:
Init.fail(e)
else:
Init.success()
Init.done()
Term.refresh()
Draw.out(clear=True)
if CONFIG.draw_clock:
Box.clock_on = True
if DEBUG: TimeIt.stop("Init")
#? Main loop ------------------------------------------------------------------------------------->
def run():
while not False:
Term.refresh()
Timer.stamp()
while Timer.not_zero():
if Key.input_wait(Timer.left()):
process_keys()
Collector.collect()
#? Start main loop
try:
run()
except Exception as e:
errlog.exception(f'{e}')
clean_quit(1)
else:
#? Quit cleanly even if false starts being true...
clean_quit()
if __name__ == "__main__":
main()
``` |
{
"source": "jonasbgood/StreetTreesOfNYC",
"score": 2
} |
#### File: jonasbgood/StreetTreesOfNYC/trees_of_nyc.py
```python
import numpy as np
import dash
import dash_table
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
from sodapy import Socrata
import pandas as pd
## predefined order for different health options
## although 'Alive' exists but just once, probably erroneous entry
health_status_order = ['Good', 'Fair', 'Poor', 'Dead', 'Stump']
# max number of data points. Last time I checked there were 683788 data points available
data_limit = 20000
def get_data(data_limit=2000):
# data source
datasource = "data.cityofnewyork.us"
dataset = "uvpi-gqnh"
timeout = 30
########################################################################
# Insert personal app_token here. Alternatively, if you do not want your
# token published in this file you can create a file app_token.txt in the
# same directory containing only the app_token string
token = ''
########################################################################
## Try to read token from file app_token.txt
if token == '':
try:
token = str(np.loadtxt('app_token.txt', dtype=str, max_rows=1))
except:
token = ''
if token != '':
client = Socrata(datasource,token,timeout=timeout)
else:
client = Socrata(datasource, None, timeout=timeout)
record_count_total = client.get(dataset, select="COUNT(*)")
record_count_total = int(record_count_total[0]['COUNT'])
## results_meta = client.get_metadata(dataset)
results = client.get(dataset, limit=data_limit)
client.close()
## Convert to pandas DataFrame
df = pd.DataFrame.from_dict(results)
# make data types usable and consistent
df['tree_id'] = df['tree_id'].astype(int)
df['latitude'] = df['latitude'].astype(float)
df['longitude'] = df['longitude'].astype(float)
df['tree_dbh'] = df['tree_dbh'].astype(float)
df['stump_diam'] = df['stump_diam'].astype(float)
df['status'] = df['status'].astype(str) # in order to handle NaN as 'nan'
df['health'] = df['health'].astype(str)
df['spc_latin'] = df['spc_latin'].astype(str)
df['spc_common'] = df['spc_common'].astype(str)
df['problems'] = df['problems'].astype(str)
## replace small diameter values with higher values for visualization in a new column
df['tree_dbh_vis'] = df.tree_dbh
df.loc[df.status == 'Stump', 'tree_dbh_vis'] = df.stump_diam
df.loc[df.tree_dbh_vis < 5, 'tree_dbh_vis'] = 5
## clipping of extremely large diameter
df.loc[df.tree_dbh_vis > 25, 'tree_dbh_vis'] = 25
## replace values - variant 1, using numpy.where (strange... but it works)
# df.spc_common = np.where(df.health == 'nan', df.status, df.spc_common)
## replace NaN in health by status entries ('Stump' or 'Dead')
# df['health'] = np.where(df['health'] == 'nan', df['status'], df['health'])
## replace nan values with status entries - variant 2, use pandas.where
df.spc_common = df.status.where(df.spc_common == 'nan', df.spc_common)
df['health'] = df['status'].where(df['health'] == 'nan', df['health'])
return df, record_count_total
def create_mapbox_figure(df):
if df.count()[0] > 0:
health_status_selected = df['health'].unique().astype(str)
## set legend entries in predefined order
category_orders = [
val for val in health_status_order if val in health_status_selected]
## change color order to fit health status order
my_colors = px.colors.DEFAULT_PLOTLY_COLORS.copy()
my_colors[0] = px.colors.DEFAULT_PLOTLY_COLORS[2] # 'Good' = green
my_colors[1] = px.colors.DEFAULT_PLOTLY_COLORS[0] # 'Fair' = blue
my_colors[2] = px.colors.DEFAULT_PLOTLY_COLORS[1] # 'Poor' = orange
## set color values
color_discrete_sequence = [my_colors[idx] for idx, val in
enumerate(health_status_order) if val in health_status_selected]
## set hover data
hover_data = {'spc_latin': True,
'health': True,
'problems': True,
'tree_dbh': True,
'tree_dbh_vis': False,
'latitude': False,
'longitude': False,
'tree_id': True,
## it is important to have tree_id on the last position
## for single tree identification in get_single_tree_data()
}
fig = px.scatter_mapbox(df,
lat="latitude",
lon="longitude",
hover_name='spc_common',
hover_data=hover_data,
color='health',
category_orders={'health': category_orders},
color_discrete_sequence=color_discrete_sequence,
size='tree_dbh_vis',
size_max=15,
mapbox_style="carto-positron",
height=1000,
)
fig.update_layout(legend=dict(
yanchor="top",
y=0.99,
xanchor="left",
x=0.01
))
else:
## show empty map
#category_orders = health_status_order
#color_discrete_sequence = my_colors
fig = px.scatter_mapbox(df,
lat="latitude",
lon="longitude",
hover_name='spc_common',
mapbox_style="carto-positron",
height=1000)
## this might help to remember the map's position and zoom;
## still looking for a better way to handle new data while
## keeping the user's position...
fig['layout']['uirevision'] = 'my_setup'
return fig
def make_borough_options(df, borough_names):
options = [{'label': val + ' ({})'.format(sum(df.boroname == val)), 'value': val} for val in borough_names]
return options
def make_health_status_options(df, health_status):
options = [{'label': val + ' ({})'.format(sum(df.health == val)), 'value': val} for val in health_status]
return options
## get data
df, record_count_total = get_data(data_limit)
df_count = df.count()[0]
## create health_status filter options
## 'Alive' appears just once or so; probably an erroneous entry
health_status_order = ['Good', 'Fair', 'Poor', 'Dead', 'Stump']
health_status_unique = df['health'].unique().astype(str)
# 1. add known status elements first in order
health_status = [
val for val in health_status_order if val in health_status_unique]
# 2. add additional unexpected or new status elements at back
health_status.extend(
[val for val in health_status_unique if val not in health_status_order])
## compare health status lists and give warning if unexpected elements in health_status_unique
if set(health_status_unique) - set(health_status_order) != set():
print('Warning: Not all health status options covered:', set(health_status_unique) - set(health_status_order))
## create borough filter options
borough_names = df['boroname'].unique()
borough_names.sort()
## set up app
external_stylesheets=['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets, title='Street Trees', prevent_initial_callbacks=False)
app.layout = html.Div([
html.Div([ # main row
html.Div([ # first column, width 8
html.H1(children='Hello Street Trees of New York City'),
dcc.Markdown('''
The NYC 2015 Street Tree Census counts {} entries in total. Entries shown in this application: {}
Data from here: [NYC OpenData 2015 Street Tree Census](https://data.cityofnewyork.us/Environment/2015-Street-Tree-Census-Tree-Data/uvpi-gqnh)
'''.format(record_count_total, df_count)),
## Map for visualization
dcc.Loading(id='loading_1', type='default',
children = dcc.Graph(id='graph_mapbox')),
html.Div([ # column
html.Div([
## Checklist for selecting Boroughs
html.H3('Borough'),
dcc.Checklist(id='checklist_borough',
# make checklist with total number of elements in each category like, e.g.: Queens (212)
options=make_borough_options(df, borough_names),
value=['Brooklyn']),
], className='three columns'),
html.Div([
## Checklist for selecting health status
html.H3('Health status'),
dcc.Checklist(id='checklist_health',
# make checklist with total number of elements in each category like, e.g.: Good (1426)
options=make_health_status_options(df, health_status),
value=health_status),
## storage variables wrapped in Loading(). This gives a
## lifesign when large data sets are processed
html.Br(),
html.Br(),
dcc.Loading(id='loading_2', type='default',
children=[
dcc.Store(id='store_df_filtered'),
dcc.Store(id='store_df_filtered_borough'),
dcc.Store(id='store_df_filtered_health'),
dcc.Store(id='store_df_graph_select'),]),
], className='three columns'),
html.Div([
## Export section
html.H3('Export data'),
html.H6('Complete data set'),
html.Button("Download CSV", id="btn_all_csv"),
dcc.Download(id="download_dataframe_all_csv"),
html.Button("Download XLSX", id="btn_all_xlsx"),
dcc.Download(id="download_dataframe_all_xlsx"),
html.Br(),
html.Br(),
html.H6('Filtered data set'),
html.Button("Download CSV", id="btn_filtered_csv"),
dcc.Download(id="download_dataframe_filtered_csv"),
html.Button("Download XLSX", id="btn_filtered_xlsx"),
dcc.Download(id="download_dataframe_filtered_xlsx"),
html.Br(),
html.Br(),
html.H6('User selected (graphical selection)'),
html.Button("Download CSV", id="btn_graph_select_csv"),
dcc.Download(id="download_dataframe_graph_select_csv"),
html.Button("Download XLSX", id="btn_graph_select_xlsx"),
dcc.Download(id="download_dataframe_graph_select_xlsx"),
], className='six columns'),
], className='column'),
], className='eight columns'),
html.Div([ # second sub column, width 3 for table on right side
## Table showing details of selected item
html.H3('Selected tree'),
dash_table.DataTable(
id='selectedTreeTable',
columns=[{'name': 'Trait', 'id': 'Trait'},
{'name': 'Value', 'id': 'Value'}],
),
], className='three columns'),
], className='row'),
# ## only for testing and debugging
# html.Div('TEST', id='test_text'),
])
##############################################################################
## call back functions
##############################################################################
## update filtered data
@app.callback(dash.dependencies.Output('store_df_filtered', 'data'),
dash.dependencies.Output('store_df_filtered_borough', 'data'),
dash.dependencies.Output('store_df_filtered_health', 'data'),
dash.dependencies.Input('checklist_borough', 'value'),
dash.dependencies.Input('checklist_health', 'value'),
prevent_initial_call=False,)
def update_filtered_data(borough_name, health_status):
df_filtered_borough = df.loc[df['boroname'].isin(borough_name)]
df_filtered_health = df.loc[df['health'].isin(health_status)]
df_filtered = df_filtered_borough.loc[df['health'].isin(health_status)]
return df_filtered.to_json(date_format='iso', orient='split'), \
df_filtered_borough.to_json(date_format='iso', orient='split'), \
df_filtered_health.to_json(date_format='iso', orient='split')
## update mapbox figure
@app.callback(dash.dependencies.Output('graph_mapbox', 'figure'),
dash.dependencies.Input('store_df_filtered', 'data'),
prevent_initial_call=True,)
def update_graph_mapbox(jsonified_filtered_data):
df_filtered = pd.read_json(jsonified_filtered_data, orient='split')
return create_mapbox_figure(df_filtered)
## update checklist_borough
@app.callback(dash.dependencies.Output('checklist_borough', 'options'),
dash.dependencies.Input('store_df_filtered_health', 'data'))
def update_borough_options(jsonified_filtered_data):
df_filtered = pd.read_json(jsonified_filtered_data, orient='split')
options = make_borough_options(df_filtered, borough_names)
return options
## update checklist_health
@app.callback(dash.dependencies.Output('checklist_health', 'options'),
dash.dependencies.Input('store_df_filtered_borough', 'data'))
def update_health_status_options(jsonified_filtered_data):
df_filtered = pd.read_json(jsonified_filtered_data, orient='split')
options = make_health_status_options(df_filtered, health_status)
return options
## save user selected tree_ids
@app.callback(dash.dependencies.Output('store_df_graph_select', 'data'),
dash.dependencies.Input('graph_mapbox', 'selectedData'))
def update_user_selected_data(selected_data):
if selected_data:
tree_ids = [val['customdata'][-1] for val in selected_data['points'] ]
return tree_ids
return None
########################
## data export functions
########################
## all data - csv
@app.callback(dash.dependencies.Output("download_dataframe_all_csv", "data"),
dash.dependencies.Input("btn_all_csv", "n_clicks"),
prevent_initial_call=True,)
def download_all_csv(n_clicks):
df_download = df.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_download.to_csv, "StreetTreesOfNYC.csv")
## all data - excel
@app.callback(dash.dependencies.Output("download_dataframe_all_xlsx", "data"),
dash.dependencies.Input("btn_all_xlsx", "n_clicks"),
prevent_initial_call=True,)
def download_all_xlsx(n_clicks):
df_download = df.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_download.to_excel, "StreetTreesOfNYC.xlsx", sheet_name="Sheet_1")
## filtered data - csv
@app.callback(dash.dependencies.Output("download_dataframe_filtered_csv", "data"),
dash.dependencies.Input("btn_filtered_csv", "n_clicks"),
dash.dependencies.Input('store_df_filtered', 'data'),
prevent_initial_call=True,)
def download_filtered_csv(n_clicks, jsonified_filtered_data):
## make sure that the button was clicked (we ignore the trigger event from altered data)
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
if 'btn_filtered_csv' in changed_id:
df_filter = pd.read_json(jsonified_filtered_data, orient='split')
df_filter = df_filter.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_filter.to_csv, "StreetTreesOfNYC_filtered.csv")
return
## filtered data - excel
@app.callback(dash.dependencies.Output("download_dataframe_filtered_xlsx", "data"),
dash.dependencies.Input("btn_filtered_xlsx", "n_clicks"),
dash.dependencies.Input('store_df_filtered', 'data'),
prevent_initial_call=True,)
def download_filtered_xlsx(n_clicks, jsonified_filtered_data):
## make sure that the button was clicked (we ignore the trigger event from altered data)
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
if 'btn_filtered_xlsx' in changed_id:
df_filter = pd.read_json(jsonified_filtered_data, orient='split')
df_filter = df_filter.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_filter.to_excel, "StreetTreesOfNYC_filtered.xlsx", sheet_name="Sheet_1")
return
## graph selected data - csv
@app.callback(dash.dependencies.Output("download_dataframe_graph_select_csv", "data"),
dash.dependencies.Input("btn_graph_select_csv", "n_clicks"),
dash.dependencies.Input('store_df_graph_select', 'data'),
prevent_initial_call=True,)
def download_graph_select_csv(n_clicks, tree_ids):
## make sure that the button was clicked (we ignore the trigger event from altered data)
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
if 'btn_graph_select_csv' in changed_id and tree_ids:
df_filter = df[df['tree_id'].isin(tree_ids)]
df_filter = df_filter.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_filter.to_csv, "StreetTreesOfNYC_graph_select.csv")
return
## graph selected data - excel
@app.callback(dash.dependencies.Output("download_dataframe_graph_select_xlsx", "data"),
dash.dependencies.Input("btn_graph_select_xlsx", "n_clicks"),
dash.dependencies.Input('store_df_graph_select', 'data'),
prevent_initial_call=True,)
def download_graph_select_xlsx(n_clicks, tree_ids):
## make sure that the button was clicked (we ignore the trigger event from altered data)
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
if 'btn_graph_select_xlsx' in changed_id and tree_ids:
df_filter = df[df['tree_id'].isin(tree_ids)]
df_filter = df_filter.drop(columns = ['tree_dbh_vis'])
return dcc.send_data_frame(df_filter.to_excel, "StreetTreesOfNYC_graph_select.xlsx", sheet_name="Sheet_1")
return
## extract all data for a single tree selected by the user
@app.callback(dash.dependencies.Output('selectedTreeTable', 'data'),
dash.dependencies.Input('graph_mapbox', 'clickData'),)
def get_single_tree_data(selected_value):
## selected_value has the following exemplary structure:
## {'points': [{'curveNumber': 4, 'pointNumber': 554, 'pointIndex': 554, 'lon': -73.94091248, 'lat': 40.71911554, 'hovertext': 'Stump', 'marker.size': 5, 'customdata': ['nan', 'Stump', 'nan', 0, 5, 40.71911554, -73.94091248, '221731']}]}
## we are just interested in the tree_id which is the last value of
## 'customdata'
if selected_value:
tree_id = selected_value['points'][0]['customdata'][-1]
## get complete entry for tree_id
value = df.loc[df.tree_id == tree_id]
## remove tree_dbh_vis
value = value.drop(columns = ['tree_dbh_vis'])
## Transpose
value = value.T
## copy original row names into a new column
value['Trait'] = value.index
## create new column names
value.columns = ['Value', 'Trait']
# convert dataframe into dict which can be fed into DataTable
data = value.to_dict('records')
return data
return None
# ## only for testing and debugging
# @app.callback(dash.dependencies.Output('test_text', 'children'),
# dash.dependencies.Input("store_df_graph_select", "data"))
# def temp_fun(selected_values):
# if selected_values:
# #tree_ids = [val['customdata'][-1] for val in selected_values['points'] ]
# #df = pd.DataFrame.from_dict(selected_value)
# return '{} ############ '.format(selected_values)
# return None
if __name__ == '__main__':
app.run_server(debug=True, host='0.0.0.0')
``` |
{
"source": "jonasblasas/codecracking",
"score": 4
} |
#### File: codecracking/merge_sorted_arrays/merge_sorted_arrays.py
```python
import random
arr_x = []
arr_y = []
for i in range(0, 20):
arr_x.append(random.randrange(0, 100))
for i in range(0, 15):
arr_y.append(random.randrange(0, 100))
len_x = len(arr_x)
len_y = len(arr_y)
# This function takes last element as pivot, places
# the pivot element at its correct position in sorted
# array, and places all smaller (smaller than pivot)
# to left of pivot and all greater elements to right
# of pivot
def partition(arr,low,high):
i = ( low-1 ) # index of smaller element
pivot = arr[high] # pivot
for j in range(low , high):
# If current element is smaller than or
# equal to pivot
if arr[j] <= pivot:
# increment index of smaller element
i = i+1
arr[i],arr[j] = arr[j],arr[i]
arr[i+1],arr[high] = arr[high],arr[i+1]
return ( i+1 )
# The main function that implements QuickSort
# arr[] --> Array to be sorted,
# low --> Starting index,
# high --> Ending index
def quickSort(arr,low,high):
if low < high:
# pi is partitioning index, arr[p] is now
# at right place
pi = partition(arr,low,high)
# Separately sort elements before
# partition and after partition
quickSort(arr, low, pi-1)
quickSort(arr, pi+1, high)
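# Average-case cost is O(n log n); the fixed last-element pivot degrades to
# O(n^2) on already-sorted input.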
quickSort(arr_x, 0, len_x-1)
quickSort(arr_y, 0, len_y-1)
print(arr_x)
print(arr_y)
# Merge two already sorted arrays in O(len(arr_1) + len(arr_2)) time with
# the classic two-pointer technique, then append whichever tail remains.
def merge_sorted_arrays(arr_1, arr_2):
merged_array = []
i1 = 0
i2 = 0
while i1 < len(arr_1) and i2 < len(arr_2):
if arr_1[i1] < arr_2[i2]:
merged_array.append(arr_1[i1])
i1 += 1
else:
merged_array.append(arr_2[i2])
i2 += 1
for i in range(i1, len(arr_1)):
merged_array.append(arr_1[i])
for i in range(i2, len(arr_2)):
merged_array.append(arr_2[i])
return merged_array
print(merge_sorted_arrays(arr_x, arr_y))
```
#### File: codecracking/queues/queues_two_stacks.py
```python
class Stack:
def __init__(self):
self.data = []
def pop(self):
if self.is_empty():
return None
val = self.data[len(self.data)-1]
self.data = self.data[:len(self.data)-1]
return val
def peek(self):
if self.is_empty():
return None
return self.data[len(self.data)-1]
def push(self, val):
self.data.append(val)
def is_empty(self):
return len(self.data) == 0
class Queue:
def __init__(self):
self.in_stack = Stack()
self.out_stack = Stack()
def enqueue(self, val):
self.in_stack.push(val)
def dequeue(self):
# Refill the out stack only when it is empty; reversing the in stack
# restores FIFO order. Each element moves between stacks at most once,
# so enqueue/dequeue run in amortized O(1).
if self.out_stack.is_empty():
if self.in_stack.is_empty():
return None
while self.in_stack.is_empty() == False:
self.out_stack.push(self.in_stack.pop())
return self.out_stack.pop()
def peek(self):
if self.out_stack.is_empty():
if self.in_stack.is_empty():
return None
while self.in_stack.is_empty() == False:
self.out_stack.push(self.in_stack.pop())
return self.out_stack.peek()
def is_empty(self):
# Bug fix: the original returned the bound method 'self.out_stack.is_empty'
# (always truthy) instead of calling it.
return self.in_stack.is_empty() and self.out_stack.is_empty()
def print_queue(q):
s = "["
for i in range(0, len(q.out_stack.data)):
s += str(q.out_stack.data[i])
if i < len(q.out_stack.data)-1:
s += ", "
for i in range(0, len(q.in_stack.data)):
s += str(q.in_stack.data[i])
if i < len(q.in_stack.data)-1:
s += ", "
s += "]"
print(s)
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
# should print [1, 2, 3]
print_queue(queue)
v = queue.dequeue()
# should print [2, 3]
print_queue(queue)
v = queue.dequeue()
# should print [3]
print_queue(queue)
queue.enqueue(4)
queue.enqueue(5)
queue.enqueue(6)
v = queue.dequeue()
# should print [4, 5, 6]
print_queue(queue)
v = queue.dequeue()
v = queue.dequeue()
v = queue.dequeue()
print_queue(queue)
v = queue.dequeue()
print_queue(queue)
``` |
{
"source": "jonasblixt/mongoose",
"score": 3
} |
#### File: hw/scripts/model.py
```python
import math
class Net(object):
def __init__(self, index, name):
self.index = index
self.name = name
self.length = 0.0
self.delay_ps = 0.0
self.segments = []
self.vias = []
def get_name(self):
return self.name
def add_segment(self, seg):
self.segments.append(seg)
self.length += seg.get_length_mm()
self.delay_ps += seg.get_delay_ps()
def add_via(self, via):
self.vias.append(via)
self.length += via.get_length_mm()
self.delay_ps += via.get_delay_ps()
def via_count(self):
return len(self.vias)
def get_delay_ps(self):
return self.delay_ps
def length(self):
return self.length
class ElectricalObject(object):
def __init__(self):
self.delay_ps = 0.0
self.length = 0.0
def get_delay_ps(self):
return self.delay_ps
def add_delay_ps(self, dly):
self.delay_ps += dly
def get_length_mm(self):
return self.length
class Pad(ElectricalObject):
pass
class Segment(ElectricalObject):
def __init__(self, pcb, sx, sy, ex, ey, w, layer_str):
ElectricalObject.__init__(self)
self.sx = sx
self.sy = sy
self.ex = ex
self.ey = ey
self.w = w
self.layer_str = layer_str
self.length += math.sqrt(math.pow(ex - sx, 2) + \
math.pow(ey - sy, 2))
e_eff = 0.475 * pcb.stackup.get_er() + 0.68
# ~3.337 ps/mm (free-space light delay) scaled by sqrt(e_eff) gives the
# microstrip propagation delay per mm; accumulate it over the segment length.
self.add_delay_ps(3.337 * math.sqrt(e_eff) * self.length)
class Via(ElectricalObject):
def __init__(self, pcb, x, y, dia, drill, start_layer, stop_layer, net_id):
ElectricalObject.__init__(self)
self.x = x
self.y = y
self.dia = dia
self.drill = drill
self.start_layer = start_layer
self.stop_layer = stop_layer
self.net_id = net_id
self.length = pcb.stackup.get_thickness()
er = pcb.stackup.get_er()
pcb_thickness_inch = pcb.stackup.get_thickness() / 25.4
dia_inch = drill / 25.4
dia_clearance_inch = dia / 25.4
C_via_pF = (1.41 * er * pcb_thickness_inch * dia_inch) \
/ (dia_clearance_inch - dia_inch)
L_via_nH = pcb_thickness_inch * 5.08 * \
(1 + math.log(4 * pcb_thickness_inch/dia_inch))
delay_via_ps = math.sqrt(L_via_nH * C_via_pF)
#print("Via %.2f pF, %.2f nH %.2f ps "%(C_via_pF, L_via_nH,
# delay_via_ps))
self.add_delay_ps(delay_via_ps)
class Layer(object):
def __init__(self, layer_idx, thickness, er, kind):
self.thickness = thickness
self.layer_idx = layer_idx
self.er = er
self.kind = kind
def get_thickness(self):
return self.thickness
def get_er(self):
return self.er
def get_index(self):
return self.layer_idx
class Stackup(object):
def __init__(self, name="Generic"):
self.name = name
self.layers_by_name = {}
self.layers_by_index = {}
self.layers = []
self.er = 4.0
self.cu_layer_count = 1
self.thickness = 0.0
def get_name(self):
return self.name
def get_er(self):
return self.er
def get_thickness(self):
return self.thickness
def add_cu_layer(self, thickness, layer_mapping_str):
l = Layer(self.cu_layer_count, thickness, self.er, "Cu")
self.layers_by_name[layer_mapping_str] = l
self.layers_by_index[self.cu_layer_count] = l
self.layers.append(l)
self.cu_layer_count += 1
self.thickness += thickness
return l
def add_pp_layer(self, thickness):
self.layers.append(Layer(-1, thickness, -1.0, "pp"))
self.thickness += thickness
def add_core_layer(self, thickness):
self.layers.append(Layer(-1, thickness, -1.0, "core"))
self.thickness += thickness
def get_layers(self):
return self.layers
def get_layer_by_index(self, idx):
return self.layers_by_index[idx]
def get_layer_by_name(self, name):
return self.layers_by_name[name]
def distance_from_to_layer(self, l1, l2):
        # Sum the thicknesses of the layers strictly between copper layers
        # l1 and l2; l1 is expected to appear before l2 in the stackup.
        dist = 0.0
        start_measure = False
        for l in self.layers:
            if l.layer_idx == l2:
                break
            if start_measure:
                dist += l.thickness
            if l.layer_idx == l1:
                start_measure = True
return dist
class PCB(object):
def __init__(self, stackup):
self.pcb_thickness = -1.0
self.stackup = stackup
self.nets = {}
def add_segment(self, net_idx, seg):
net = self.nets[net_idx]
net.add_segment(seg)
def add_net(self, name, net):
self.nets[name] = net
def add_via(self, net_idx, via):
net = self.nets[net_idx]
net.add_via(via)
def get_nets(self):
return self.nets
def process(self):
assert(self.pcb_thickness > 0.0)
assert(len(self.nets.keys()) > 0)
# Calculate distance added by via's
for n in self.nets.keys():
net = self.nets[n]
for v in net.vias:
segs = []
for s in net.segments:
if (v.x == s.sx and v.y == s.sy) or \
(v.x == s.ex and v.y == s.ey):
segs.append(s)
if len(segs) > 1:
l_start = self.stackup.get_layer_by_name(segs[0].layer_str)
l_end = self.stackup.get_layer_by_name(segs[1].layer_str)
net.length += \
self.stackup.distance_from_to_layer(l_start.layer_idx,
l_end.layer_idx)
print("Found %u nets"%(len(self.nets.keys())))
``` |
{
"source": "jonasbn/cloudsuite",
"score": 2
} |
#### File: django-workload/django_workload/middleware.py
```python
from django.utils.deprecation import MiddlewareMixin
from django_statsd.middleware import (
GraphiteMiddleware,
GraphiteRequestTimingMiddleware,
)
# Used for sample-based profiling
SAMPLE_COUNT = 0
# Update django_statsd middleware to newer Django requirements
class GraphiteMiddleware(MiddlewareMixin, GraphiteMiddleware):
pass
class GraphiteRequestTimingMiddleware(
MiddlewareMixin, GraphiteRequestTimingMiddleware):
pass
# We need access to request metadata from within patched support code. Store
# the request in a thread global
def global_request_middleware(get_response):
from .global_request import ThreadLocalRequest
def middleware(request):
with ThreadLocalRequest(request):
return get_response(request)
return middleware
# Record memory and CPU stats per view
def memory_cpu_stats_middleware(get_response):
import time
import psutil
from collections import Counter
from django_statsd.clients import statsd
from .global_request import get_view_name
from django.conf import settings
mem_entries = (
'rss',
'shared_clean', 'shared_dirty',
'private_clean', 'private_dirty'
)
def summed(info):
res = dict.fromkeys(mem_entries, 0)
for path_info in info:
for name in mem_entries:
res[name] += getattr(path_info, name)
return res
def middleware(request):
global SAMPLE_COUNT
SAMPLE_COUNT += 1
if SAMPLE_COUNT >= settings.SAMPLE_RATE:
SAMPLE_COUNT = 0
cpu_before = time.clock_gettime(time.CLOCK_PROCESS_CPUTIME_ID)
mem_before = summed(psutil.Process().memory_maps())
try:
return get_response(request)
finally:
cpu_after = time.clock_gettime(time.CLOCK_PROCESS_CPUTIME_ID)
statsd.gauge(
'cpu.{}'.format(get_view_name()),
cpu_after - cpu_before)
mem_after = summed(psutil.Process().memory_maps())
mem_key_base = 'memory.{}.{{}}'.format(get_view_name())
for name, after in mem_after.items():
diff = after - mem_before[name]
statsd.gauge(mem_key_base.format(name) + '.total', after)
statsd.gauge(mem_key_base.format(name) + '.change', diff)
else:
return get_response(request)
return middleware
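# Rough wiring sketch (the settings below are assumptions, not part of this
# file): the sampling middleware profiles only one request in every
# SAMPLE_RATE.
#
#   MIDDLEWARE = [
#       'django_workload.middleware.global_request_middleware',
#       'django_workload.middleware.memory_cpu_stats_middleware',
#       ...
#   ]
#   SAMPLE_RATE = 10  # profile roughly 1 request in 10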
```
#### File: django-workload/django_workload/patches.py
```python
from functools import wraps
from inspect import getdoc
from statsd.defaults import django as statsd_django_defaults
from statsd.client import StatsClient
from .global_request import get_view_name
from django.conf import settings
_patches = []
# Used for sample-based profiling
CASSANDRA_COUNT = 0
MEMCACHED_COUNT = 0
def register_patch(f):
_patches.append((f, (getdoc(f) or '').partition('\n')[0]))
# TODO: send a pull request upstream
@register_patch
def patch_django_statsd_ipv6():
"""Make django_statsd work with IPv6"""
def insert_ipv6(**kwargs):
if 'ipv6' not in kwargs:
kwargs['ipv6'] = statsd_django_defaults.ipv6
return StatsClient(**kwargs)
# Only needs to be applied if STATSD_IPV6 is set to True and the connection
# fails when trying to import the client.
try:
from django_statsd.clients import normal
except OSError as e:
if e.errno == -2: # Name or service not known
# patch the client to make sure we can support IPv6
# Use sys.modules to avoid triggering the exception again
import sys
normal = sys.modules['django_statsd.clients.normal']
normal.StatsClient = insert_ipv6
else:
raise
if settings.PROFILING:
@register_patch
def patch_cassandra_execute():
"""Record timings for Cassandra operations"""
from django_statsd.clients import statsd
from cassandra.cqlengine.query import AbstractQuerySet
def decorator(orig):
@wraps(orig)
def timed_execute(self, *args, **kwargs):
global CASSANDRA_COUNT
CASSANDRA_COUNT += 1
if CASSANDRA_COUNT >= settings.SAMPLE_RATE:
CASSANDRA_COUNT = 0
key = 'cassandra.{}.execute'.format(get_view_name())
statsd.incr(key)
with statsd.timer(key):
return orig(self, *args, **kwargs)
else:
return orig(self, *args, **kwargs)
return timed_execute
AbstractQuerySet._execute = decorator(AbstractQuerySet._execute)
@register_patch
def patch_cassandra_uwsgi_postfork():
"""Reset cassandra connection after forking.
When running under uwsgi, use postfork to re-set the cassandra connection.
Otherwise we run into issues with shared connections which time out.
"""
try:
from uwsgidecorators import postfork
except ImportError:
# Not available, presumably we are not running under uwsgi
return
from django_cassandra_engine.utils import get_cassandra_connections
@postfork
def reset_cassandra_connection():
for _, conn in get_cassandra_connections():
conn.reconnect()
if settings.PROFILING:
@register_patch
def patch_memcached_methods():
"""Record timings for the Memcached Django integration"""
from django.core.cache.backends.memcached import BaseMemcachedCache
from django_statsd.clients import statsd
def decorator(orig):
@wraps(orig)
def timed(self, *args, **kwargs):
global MEMCACHED_COUNT
MEMCACHED_COUNT += 1
if MEMCACHED_COUNT >= settings.SAMPLE_RATE:
MEMCACHED_COUNT = 0
key = 'memcached.{}.{}'.format(get_view_name(), orig.__name__)
with statsd.timer(key):
return orig(self, *args, **kwargs)
else:
return orig(self, *args, **kwargs)
return timed
for name in ('add', 'get', 'set', 'delete', 'get_many', 'incr', 'decr',
'set_many', 'delete_many'):
orig = getattr(BaseMemcachedCache, name)
setattr(BaseMemcachedCache, name, decorator(orig))
def apply():
for patch, descr in _patches:
print(descr)
patch()
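# Typical call site (an assumption about app start-up, e.g. in an
# AppConfig.ready() hook):
#
#   from django_workload import patches
#   patches.apply()  # prints each patch description as it is applied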
```
#### File: django-workload/django_workload/users.py
```python
import random
from functools import wraps
from threading import Lock
from .models import UserModel
# global cache; normally fetching users is cached in memcached or similar
user_ids = None
# A single shared lock: a fresh Lock() created per call would never be
# contended and so would provide no mutual exclusion.
_user_ids_lock = Lock()
def all_users():
    global user_ids
    if user_ids is None:
        with _user_ids_lock:
            # re-check after acquiring the lock, as another thread could have
            # filled the cache between checking for None and acquiring it.
            if user_ids is None:
                user_ids = list(UserModel.objects.values_list('id', flat=True))
    return user_ids
def require_user(view):
@wraps(view)
def wrapper(request, *args, **kwargs):
users = all_users()
        # Pick a uniformly random user; random.choice avoids the off-by-one
        # risk of randint(0, len(users)), whose upper bound is inclusive.
        user_id = random.choice(users)
request.user = UserModel.objects.get(id=user_id)
return view(request, *args, **kwargs)
return wrapper
def suggested_users(user, count=5):
"""Suggest a number of users for this user to follow
A random sample of users not already followed is included.
"""
    followed = set(user.following)
    candidates = [uuid for uuid in all_users() if uuid not in followed]
    # random.sample raises ValueError when count exceeds the population,
    # so cap the sample size at the number of candidates.
    return random.sample(candidates, min(count, len(candidates)))
``` |
{
"source": "jonasboecquaert/connexion",
"score": 2
} |
#### File: connexion/apis/flask_api.py
```python
import logging
import warnings
from typing import Any
import flask
from werkzeug.local import LocalProxy
from connexion.apis import flask_utils
from connexion.apis.abstract import AbstractAPI
from connexion.jsonifier import Jsonifier
from connexion.lifecycle import ConnexionRequest, ConnexionResponse
from connexion.utils import is_json_mimetype
logger = logging.getLogger('connexion.apis.flask_api')
class FlaskApi(AbstractAPI):
def _set_base_path(self, base_path):
super()._set_base_path(base_path)
self._set_blueprint()
def _set_blueprint(self):
logger.debug('Creating API blueprint: %s', self.base_path)
endpoint = flask_utils.flaskify_endpoint(self.base_path)
self.blueprint = flask.Blueprint(endpoint, __name__, url_prefix=self.base_path,
template_folder=str(self.options.openapi_console_ui_from_dir))
def _add_operation_internal(self, method, path, operation):
operation_id = operation.operation_id
logger.debug('... Adding %s -> %s', method.upper(), operation_id,
extra=vars(operation))
flask_path = flask_utils.flaskify_path(path, operation.get_path_parameter_types())
endpoint_name = flask_utils.flaskify_endpoint(operation.operation_id,
operation.randomize_endpoint)
function = operation.function
self.blueprint.add_url_rule(flask_path, endpoint_name, function, methods=[method])
@classmethod
def get_response(cls, response, mimetype=None, request=None):
"""Gets ConnexionResponse instance for the operation handler
result. Status Code and Headers for response. If only body
data is returned by the endpoint function, then the status
code will be set to 200 and no headers will be added.
If the returned object is a flask.Response then it will just
pass the information needed to recreate it.
:type response: flask.Response | (flask.Response,) | (flask.Response, int) | (flask.Response, dict) | (flask.Response, int, dict)
:rtype: ConnexionResponse
"""
return cls._get_response(response, mimetype=mimetype, extra_context={"url": flask.request.url})
@classmethod
def _is_framework_response(cls, response):
""" Return True if provided response is a framework type """
return flask_utils.is_flask_response(response)
@classmethod
def _framework_to_connexion_response(cls, response, mimetype):
""" Cast framework response class to ConnexionResponse used for schema validation """
return ConnexionResponse(
status_code=response.status_code,
mimetype=response.mimetype,
content_type=response.content_type,
headers=response.headers,
body=response.get_data() if not response.direct_passthrough else None,
is_streamed=response.is_streamed
)
@classmethod
def _connexion_to_framework_response(cls, response, mimetype, extra_context=None):
""" Cast ConnexionResponse to framework response class """
flask_response = cls._build_response(
mimetype=response.mimetype or mimetype,
content_type=response.content_type,
headers=response.headers,
status_code=response.status_code,
data=response.body,
extra_context=extra_context,
)
return flask_response
@classmethod
def _build_response(cls, mimetype, content_type=None, headers=None, status_code=None, data=None, extra_context=None):
if cls._is_framework_response(data):
return flask.current_app.make_response((data, status_code, headers))
data, status_code, serialized_mimetype = cls._prepare_body_and_status_code(data=data, mimetype=mimetype, status_code=status_code, extra_context=extra_context)
kwargs = {
'mimetype': mimetype or serialized_mimetype,
'content_type': content_type,
'headers': headers,
'response': data,
'status': status_code
}
kwargs = {k: v for k, v in kwargs.items() if v is not None}
return flask.current_app.response_class(**kwargs)
@classmethod
def _serialize_data(cls, data, mimetype):
if (isinstance(mimetype, str) and is_json_mimetype(mimetype)):
body = cls.jsonifier.dumps(data)
elif not (isinstance(data, bytes) or isinstance(data, str)):
warnings.warn(
"Implicit (flask) JSON serialization will change in the next major version. "
"This is triggered because a response body is being serialized as JSON "
"even though the mimetype is not a JSON type. "
"This will be replaced by something that is mimetype-specific and may "
"raise an error instead of silently converting everything to JSON. "
"Please make sure to specify media/mime types in your specs.",
FutureWarning # a Deprecation targeted at application users.
)
body = cls.jsonifier.dumps(data)
else:
body = data
return body, mimetype
@classmethod
def get_request(cls, *args, **params):
# type: (*Any, **Any) -> ConnexionRequest
"""Gets ConnexionRequest instance for the operation handler
result. Status Code and Headers for response. If only body
data is returned by the endpoint function, then the status
code will be set to 200 and no headers will be added.
If the returned object is a flask.Response then it will just
pass the information needed to recreate it.
:rtype: ConnexionRequest
"""
flask_request = flask.request
scope = flask_request.environ['asgi.scope']
context_dict = scope.get('extensions', {}).get('connexion_context', {})
setattr(flask._request_ctx_stack.top, 'connexion_context', context_dict)
request = ConnexionRequest(
flask_request.url,
flask_request.method,
headers=flask_request.headers,
form=flask_request.form,
query=flask_request.args,
body=flask_request.get_data(),
json_getter=lambda: flask_request.get_json(silent=True),
files=flask_request.files,
path_params=params,
context=context_dict,
cookies=flask_request.cookies,
)
logger.debug('Getting data and status code',
extra={
'data': request.body,
'data_type': type(request.body),
'url': request.url
})
return request
@classmethod
def _set_jsonifier(cls):
"""
Use Flask specific JSON loader
"""
cls.jsonifier = Jsonifier(flask.json, indent=2)
def _get_context():
return getattr(flask._request_ctx_stack.top, 'connexion_context')
context = LocalProxy(_get_context)
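# Usage sketch (hedged): because `context` is a werkzeug LocalProxy, handler
# code can read per-request values without holding a request reference, e.g.:
#
#   from connexion.apis.flask_api import context
#
#   def get_greeting():
#       user = context.get("user")  # resolved against the current request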
```
#### File: connexion/middleware/security.py
```python
import logging
import pathlib
import typing as t
from collections import defaultdict
from starlette.types import ASGIApp, Receive, Scope, Send
from connexion.apis.abstract import AbstractSpecAPI
from connexion.exceptions import MissingMiddleware
from connexion.http_facts import METHODS
from connexion.lifecycle import MiddlewareRequest
from connexion.middleware import AppMiddleware
from connexion.middleware.routing import ROUTING_CONTEXT
from connexion.security import SecurityHandlerFactory
logger = logging.getLogger("connexion.middleware.security")
class SecurityMiddleware(AppMiddleware):
"""Middleware to check if operation is accessible on scope."""
def __init__(self, app: ASGIApp) -> None:
self.app = app
self.apis: t.Dict[str, SecurityAPI] = {}
def add_api(self, specification: t.Union[pathlib.Path, str, dict], **kwargs) -> None:
api = SecurityAPI(specification, **kwargs)
self.apis[api.base_path] = api
async def __call__(self, scope: Scope, receive: Receive, send: Send):
if scope["type"] != "http":
await self.app(scope, receive, send)
return
try:
connexion_context = scope['extensions'][ROUTING_CONTEXT]
except KeyError:
raise MissingMiddleware('Could not find routing information in scope. Please make sure '
'you have a routing middleware registered upstream. ')
api_base_path = connexion_context.get('api_base_path')
if api_base_path:
api = self.apis[api_base_path]
operation_id = connexion_context.get('operation_id')
try:
operation = api.operations[operation_id]
except KeyError as e:
if operation_id is None:
logger.debug('Skipping security check for operation without id. Enable '
'`auth_all_paths` to check security for unknown operations.')
else:
raise MissingSecurityOperation('Encountered unknown operation_id.') from e
else:
request = MiddlewareRequest(scope)
await operation(request)
await self.app(scope, receive, send)
class SecurityAPI(AbstractSpecAPI):
def __init__(
self,
specification: t.Union[pathlib.Path, str, dict],
auth_all_paths: bool = False,
*args,
**kwargs
):
super().__init__(specification, *args, **kwargs)
self.security_handler_factory = SecurityHandlerFactory('context')
self.app_security = self.specification.security
self.security_schemes = self.specification.security_definitions
if auth_all_paths:
self.add_auth_on_not_found()
else:
self.operations: t.Dict[str, SecurityOperation] = {}
self.add_paths()
def add_auth_on_not_found(self):
"""Register a default SecurityOperation for routes that are not found."""
default_operation = self.make_operation()
self.operations = defaultdict(lambda: default_operation)
def add_paths(self):
paths = self.specification.get('paths', {})
for path, methods in paths.items():
for method, operation in methods.items():
if method not in METHODS:
continue
operation_id = operation.get('operationId')
if operation_id:
self.operations[operation_id] = self.make_operation(operation)
def make_operation(self, operation_spec: dict = None):
security = self.app_security
if operation_spec:
security = operation_spec.get('security', self.app_security)
return SecurityOperation(
self.security_handler_factory,
security=security,
security_schemes=self.specification.security_definitions
)
class SecurityOperation:
def __init__(
self,
security_handler_factory: SecurityHandlerFactory,
security: list,
security_schemes: dict
):
self.security_handler_factory = security_handler_factory
self.security = security
self.security_schemes = security_schemes
self.verification_fn = self._get_verification_fn()
def _get_verification_fn(self):
logger.debug('... Security: %s', self.security, extra=vars(self))
if not self.security:
return self.security_handler_factory.security_passthrough
auth_funcs = []
for security_req in self.security:
if not security_req:
auth_funcs.append(self.security_handler_factory.verify_none())
continue
sec_req_funcs = {}
oauth = False
for scheme_name, required_scopes in security_req.items():
security_scheme = self.security_schemes[scheme_name]
if security_scheme['type'] == 'oauth2':
if oauth:
logger.warning(
"... multiple OAuth2 security schemes in AND fashion not supported",
extra=vars(self))
break
oauth = True
token_info_func = self.security_handler_factory.get_tokeninfo_func(
security_scheme)
scope_validate_func = self.security_handler_factory.get_scope_validate_func(
security_scheme)
if not token_info_func:
logger.warning("... x-tokenInfoFunc missing", extra=vars(self))
break
sec_req_funcs[scheme_name] = self.security_handler_factory.verify_oauth(
token_info_func, scope_validate_func, required_scopes)
# Swagger 2.0
elif security_scheme['type'] == 'basic':
basic_info_func = self.security_handler_factory.get_basicinfo_func(
security_scheme)
if not basic_info_func:
logger.warning("... x-basicInfoFunc missing", extra=vars(self))
break
sec_req_funcs[scheme_name] = self.security_handler_factory.verify_basic(
basic_info_func)
# OpenAPI 3.0.0
elif security_scheme['type'] == 'http':
scheme = security_scheme['scheme'].lower()
if scheme == 'basic':
basic_info_func = self.security_handler_factory.get_basicinfo_func(
security_scheme)
if not basic_info_func:
logger.warning("... x-basicInfoFunc missing", extra=vars(self))
break
sec_req_funcs[
scheme_name] = self.security_handler_factory.verify_basic(
basic_info_func)
elif scheme == 'bearer':
bearer_info_func = self.security_handler_factory.get_bearerinfo_func(
security_scheme)
if not bearer_info_func:
logger.warning("... x-bearerInfoFunc missing", extra=vars(self))
break
sec_req_funcs[
scheme_name] = self.security_handler_factory.verify_bearer(
bearer_info_func)
else:
logger.warning("... Unsupported http authorization scheme %s" % scheme,
extra=vars(self))
break
elif security_scheme['type'] == 'apiKey':
scheme = security_scheme.get('x-authentication-scheme', '').lower()
if scheme == 'bearer':
bearer_info_func = self.security_handler_factory.get_bearerinfo_func(
security_scheme)
if not bearer_info_func:
logger.warning("... x-bearerInfoFunc missing", extra=vars(self))
break
sec_req_funcs[
scheme_name] = self.security_handler_factory.verify_bearer(
bearer_info_func)
else:
apikey_info_func = self.security_handler_factory.get_apikeyinfo_func(
security_scheme)
if not apikey_info_func:
logger.warning("... x-apikeyInfoFunc missing", extra=vars(self))
break
sec_req_funcs[
scheme_name] = self.security_handler_factory.verify_api_key(
apikey_info_func, security_scheme['in'], security_scheme['name']
)
else:
logger.warning(
"... Unsupported security scheme type %s" % security_scheme['type'],
extra=vars(self))
break
else:
# No break encountered: no missing funcs
if len(sec_req_funcs) == 1:
(func,) = sec_req_funcs.values()
auth_funcs.append(func)
else:
auth_funcs.append(
self.security_handler_factory.verify_multiple_schemes(sec_req_funcs))
return self.security_handler_factory.verify_security(auth_funcs)
async def __call__(self, request: MiddlewareRequest):
await self.verification_fn(request)
class MissingSecurityOperation(Exception):
pass
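# Semantics note (not part of the original module): in a `security` list the
# alternatives are OR'ed, while the schemes inside a single dict are AND'ed:
#
#   security = [
#       {"oauth": ["uid"]},        # alternative 1
#       {"key1": [], "key2": []},  # alternative 2: both API keys required
#   ]
#
# _get_verification_fn builds one auth function per alternative (using
# verify_multiple_schemes for the AND case) and wraps them all with
# verify_security(), which accepts the request when any alternative passes.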
```
#### File: connexion/tests/test_middleware.py
```python
import pytest
from connexion.middleware import ConnexionMiddleware
from starlette.datastructures import MutableHeaders
from conftest import SPECS, build_app_from_fixture
class TestMiddleware:
"""Middleware to check if operation is accessible on scope."""
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
operation_id = scope['extensions']['connexion_routing']['operation_id']
async def patched_send(message):
if message["type"] != "http.response.start":
await send(message)
return
message.setdefault("headers", [])
headers = MutableHeaders(scope=message)
headers["operation_id"] = operation_id
await send(message)
await self.app(scope, receive, patched_send)
@pytest.fixture(scope="session", params=SPECS)
def middleware_app(request):
middlewares = ConnexionMiddleware.default_middlewares + [TestMiddleware]
return build_app_from_fixture('simple', request.param, middlewares=middlewares)
def test_routing_middleware(middleware_app):
app_client = middleware_app.app.test_client()
response = app_client.post("/v1.0/greeting/robbe")
assert response.headers.get('operation_id') == 'fakeapi.hello.post_greeting', \
response.status_code
```
#### File: connexion/tests/test_operation2.py
```python
import copy
import logging
import math
import pathlib
import types
from unittest import mock
import pytest
from connexion.apis.flask_api import Jsonifier
from connexion.exceptions import InvalidSpecification
from connexion.json_schema import resolve_refs
from connexion.middleware.security import SecurityOperation
from connexion.operations import Swagger2Operation
from connexion.resolver import Resolver
TEST_FOLDER = pathlib.Path(__file__).parent
DEFINITIONS = {'new_stack': {'required': ['image_version', 'keep_stacks', 'new_traffic', 'senza_yaml'],
'type': 'object',
'properties': {'keep_stacks': {'type': 'integer',
'description':
'Number of older stacks to keep'},
'image_version': {'type': 'string',
'description':
'Docker image version to deploy'},
'senza_yaml': {'type': 'string',
'description': 'YAML to provide to senza'},
'new_traffic': {'type': 'integer',
'description':
'Percentage of the traffic'}}},
'composed': {'required': ['test'],
'type': 'object',
'properties': {'test': {'schema': {'$ref': '#/definitions/new_stack'}}}},
'problem': {"not": "defined"}}
PARAMETER_DEFINITIONS = {'myparam': {'in': 'path', 'type': 'integer'}}
OPERATION1 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}}],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION2 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}},
{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}}],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION3 = {'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'$ref': '#/parameters/myparam'}]}
OPERATION4 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [
{
'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}
},
{
'in': 'query',
'name': 'stack_version',
'default': 'one',
'type': 'number'
}
],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'summary': 'Create new stack'}
OPERATION5 = {
'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [
{
'in': 'body',
'name': 'new_stack',
'required': True,
'type': 'integer',
'default': 'stack'
}
],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'
}
OPERATION6 = {
'operationId': 'fakeapi.hello.schema',
'parameters': [
{
'type': 'object',
'in': 'body',
'name': 'new_stack',
'default': {'keep_stack': 1, 'image_version': 1, 'senza_yaml': 'senza.yaml',
'new_traffic': 100},
'schema': {'$ref': '#/definitions/new_stack'}
}
],
'responses': {},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'
}
OPERATION7 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'type': 'array', 'items': {'$ref': '#/definitions/new_stack'}}}],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION8 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'test',
'required': True,
'schema': {'$ref': '#/definitions/composed'}}],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/new_stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION9 = {'description': 'operation secured with 2 api keys',
'operationId': 'fakeapi.hello.post_greeting',
'responses': {'200': {'description': 'OK'}},
'security': [{'key1': [], 'key2': []}]}
OPERATION10 = {'description': 'operation secured with 2 oauth schemes combined using logical AND',
'operationId': 'fakeapi.hello.post_greeting',
'responses': {'200': {'description': 'OK'}},
'security': [{'oauth_1': ['uid'], 'oauth_2': ['uid']}]}
OPERATION11 = {'description': 'operation secured with an oauth schemes with 2 possible scopes (in OR)',
'operationId': 'fakeapi.hello.post_greeting',
'responses': {'200': {'description': 'OK'}},
'security': [{'oauth': ['myscope']}, {'oauth': ['myscope2']}]}
SECURITY_DEFINITIONS_REMOTE = {'oauth': {'type': 'oauth2',
'flow': 'password',
'x-tokenInfoUrl': 'https://oauth.example/token_info',
'scopes': {'myscope': 'can do stuff'}}}
SECURITY_DEFINITIONS_LOCAL = {'oauth': {'type': 'oauth2',
'flow': 'password',
'x-tokenInfoFunc': 'math.ceil',
'scopes': {'myscope': 'can do stuff',
'myscope2': 'can do other stuff'}}}
SECURITY_DEFINITIONS_BOTH = {'oauth': {'type': 'oauth2',
'flow': 'password',
'x-tokenInfoFunc': 'math.ceil',
'x-tokenInfoUrl': 'https://oauth.example/token_info',
'scopes': {'myscope': 'can do stuff'}}}
SECURITY_DEFINITIONS_WO_INFO = {'oauth': {'type': 'oauth2',
'flow': 'password',
'scopes': {'myscope': 'can do stuff'}}}
SECURITY_DEFINITIONS_2_KEYS = {'key1': {'type': 'apiKey',
'in': 'header',
'name': 'X-Auth-1',
'x-apikeyInfoFunc': 'math.ceil'},
'key2': {'type': 'apiKey',
'in': 'header',
'name': 'X-Auth-2',
'x-apikeyInfoFunc': 'math.ceil'}}
SECURITY_DEFINITIONS_2_OAUTH = {'oauth_1': {'type': 'oauth2',
'flow': 'password',
'x-tokenInfoFunc': 'math.ceil',
'scopes': {'myscope': 'can do stuff'}},
'oauth_2': {'type': 'oauth2',
'flow': 'password',
'x-tokenInfoFunc': 'math.ceil',
'scopes': {'myscope': 'can do stuff'}}}
@pytest.fixture
def api(security_handler_factory):
api = mock.MagicMock(jsonifier=Jsonifier)
api.security_handler_factory = security_handler_factory
yield api
def make_operation(op, definitions=True, parameters=True):
""" note the wrapper because definitions namespace and
operation namespace collide
"""
new_op = {"wrapper": copy.deepcopy(op)}
if definitions:
new_op.update({"definitions": DEFINITIONS})
if parameters:
new_op.update({"parameters": PARAMETER_DEFINITIONS})
return resolve_refs(new_op)["wrapper"]
def test_operation(api, security_handler_factory):
op_spec = make_operation(OPERATION1)
operation = Swagger2Operation(api=api,
method='GET',
path='endpoint',
path_parameters=[],
operation=op_spec,
app_produces=['application/json'],
app_consumes=['application/json'],
definitions=DEFINITIONS,
resolver=Resolver())
assert operation.method == 'GET'
assert operation.produces == ['application/json']
assert operation.consumes == ['application/json']
expected_body_schema = op_spec["parameters"][0]["schema"]
expected_body_schema.update({'definitions': DEFINITIONS})
assert operation.body_schema == expected_body_schema
def test_operation_remote_token_info(security_handler_factory):
verify_oauth = mock.MagicMock(return_value='verify_oauth_result')
security_handler_factory.verify_oauth = verify_oauth
security_handler_factory.get_token_info_remote = mock.MagicMock(return_value='get_token_info_remote_result')
SecurityOperation(security_handler_factory=security_handler_factory,
security=[{'oauth': ['uid']}],
security_schemes=SECURITY_DEFINITIONS_REMOTE)
verify_oauth.assert_called_with('get_token_info_remote_result',
security_handler_factory.validate_scope,
['uid'])
security_handler_factory.get_token_info_remote.assert_called_with('https://oauth.example/token_info')
def test_operation_array(api):
op_spec = make_operation(OPERATION7)
operation = Swagger2Operation(api=api,
method='GET',
path='endpoint',
path_parameters=[],
operation=op_spec,
app_produces=['application/json'],
app_consumes=['application/json'],
definitions=DEFINITIONS,
resolver=Resolver())
assert isinstance(operation.function, types.FunctionType)
assert operation.method == 'GET'
assert operation.produces == ['application/json']
assert operation.consumes == ['application/json']
expected_body_schema = {
'type': 'array',
'items': DEFINITIONS["new_stack"],
'definitions': DEFINITIONS
}
assert operation.body_schema == expected_body_schema
def test_operation_composed_definition(api):
op_spec = make_operation(OPERATION8)
operation = Swagger2Operation(api=api,
method='GET',
path='endpoint',
path_parameters=[],
operation=op_spec,
app_produces=['application/json'],
app_consumes=['application/json'],
definitions=DEFINITIONS,
resolver=Resolver())
assert isinstance(operation.function, types.FunctionType)
assert operation.method == 'GET'
assert operation.produces == ['application/json']
assert operation.consumes == ['application/json']
expected_body_schema = op_spec["parameters"][0]["schema"]
expected_body_schema.update({'definitions': DEFINITIONS})
assert operation.body_schema == expected_body_schema
def test_operation_local_security_oauth2(security_handler_factory):
verify_oauth = mock.MagicMock(return_value='verify_oauth_result')
security_handler_factory.verify_oauth = verify_oauth
SecurityOperation(security_handler_factory=security_handler_factory,
security=[{'oauth': ['uid']}],
security_schemes=SECURITY_DEFINITIONS_LOCAL)
verify_oauth.assert_called_with(math.ceil, security_handler_factory.validate_scope, ['uid'])
def test_operation_local_security_duplicate_token_info(security_handler_factory):
verify_oauth = mock.MagicMock(return_value='verify_oauth_result')
security_handler_factory.verify_oauth = verify_oauth
SecurityOperation(security_handler_factory,
security=[{'oauth': ['uid']}],
security_schemes=SECURITY_DEFINITIONS_BOTH)
    # Assert on the mock itself; calling assert_called_with on .call_args is
    # a silent no-op.
    verify_oauth.assert_called_with(math.ceil, security_handler_factory.validate_scope, ['uid'])
def test_multi_body(api):
with pytest.raises(InvalidSpecification) as exc_info: # type: py.code.ExceptionInfo
op_spec = make_operation(OPERATION2)
operation = Swagger2Operation(api=api,
method='GET',
path='endpoint',
path_parameters=[],
operation=op_spec,
app_produces=['application/json'],
app_consumes=['application/json'],
definitions=DEFINITIONS,
resolver=Resolver())
operation.body_schema
exception = exc_info.value
assert str(exception) == "GET endpoint There can be one 'body' parameter at most"
assert repr(exception) == """<InvalidSpecification: "GET endpoint There can be one 'body' parameter at most">"""
def test_no_token_info(security_handler_factory):
SecurityOperation(security_handler_factory=security_handler_factory,
security=[{'oauth': ['uid']}],
security_schemes=SECURITY_DEFINITIONS_WO_INFO)
def test_multiple_security_schemes_and(security_handler_factory):
"""Tests an operation with multiple security schemes in AND fashion."""
def return_api_key_name(func, in_, name):
return name
verify_api_key = mock.MagicMock(side_effect=return_api_key_name)
security_handler_factory.verify_api_key = verify_api_key
verify_multiple = mock.MagicMock(return_value='verify_multiple_result')
security_handler_factory.verify_multiple_schemes = verify_multiple
security = [{'key1': [], 'key2': []}]
SecurityOperation(security_handler_factory=security_handler_factory,
security=security,
security_schemes=SECURITY_DEFINITIONS_2_KEYS)
assert verify_api_key.call_count == 2
verify_api_key.assert_any_call(math.ceil, 'header', 'X-Auth-1')
verify_api_key.assert_any_call(math.ceil, 'header', 'X-Auth-2')
# Assert verify_multiple_schemes is called with mapping from scheme name
# to result of security_handler_factory.verify_api_key()
verify_multiple.assert_called_with({'key1': 'X-Auth-1', 'key2': 'X-Auth-2'})
def test_multiple_oauth_in_and(security_handler_factory, caplog):
"""Tests an operation with multiple oauth security schemes in AND fashion.
These should be ignored and raise a warning.
"""
caplog.set_level(logging.WARNING, logger="connexion.operations.secure")
verify_oauth = mock.MagicMock(return_value='verify_oauth_result')
security_handler_factory.verify_oauth = verify_oauth
security = [{'oauth_1': ['uid'], 'oauth_2': ['uid']}]
SecurityOperation(security_handler_factory=security_handler_factory,
security=security,
security_schemes=SECURITY_DEFINITIONS_2_OAUTH)
assert '... multiple OAuth2 security schemes in AND fashion not supported' in caplog.text
def test_parameter_reference(api):
op_spec = make_operation(OPERATION3, definitions=False)
operation = Swagger2Operation(api=api,
method='GET',
path='endpoint',
path_parameters=[],
operation=op_spec,
app_produces=['application/json'],
app_consumes=['application/json'],
definitions={},
resolver=Resolver())
assert operation.parameters == [{'in': 'path', 'type': 'integer'}]
def test_default(api):
op_spec = make_operation(OPERATION4)
op_spec['parameters'][1]['default'] = 1
Swagger2Operation(
api=api, method='GET', path='endpoint', path_parameters=[],
operation=op_spec, app_produces=['application/json'],
app_consumes=['application/json'], definitions=DEFINITIONS,
resolver=Resolver()
)
op_spec = make_operation(OPERATION6, parameters=False)
op_spec['parameters'][0]['default'] = {
'keep_stacks': 1,
'image_version': 'one',
'senza_yaml': 'senza.yaml',
'new_traffic': 100
}
Swagger2Operation(
api=api, method='POST', path='endpoint', path_parameters=[],
operation=op_spec, app_produces=['application/json'],
app_consumes=['application/json'], definitions=DEFINITIONS,
resolver=Resolver()
)
def test_get_path_parameter_types(api):
op_spec = make_operation(OPERATION1, parameters=False)
op_spec['parameters'] = [
{'in': 'path', 'type': 'int', 'name': 'int_path'},
{'in': 'path', 'type': 'string', 'name': 'string_path'},
{'in': 'path', 'type': 'string', 'format': 'path', 'name': 'path_path'}
]
operation = Swagger2Operation(
api=api, method='GET', path='endpoint', path_parameters=[],
operation=op_spec, app_produces=['application/json'],
app_consumes=['application/json'],
definitions=DEFINITIONS, resolver=Resolver()
)
assert {'int_path': 'int', 'string_path': 'string', 'path_path': 'path'} == operation.get_path_parameter_types()
def test_oauth_scopes_in_or(security_handler_factory):
"""Tests whether an OAuth security scheme with 2 different possible scopes is correctly handled."""
verify_oauth = mock.MagicMock(return_value='verify_oauth_result')
security_handler_factory.verify_oauth = verify_oauth
security = [{'oauth': ['myscope']}, {'oauth': ['myscope2']}]
SecurityOperation(security_handler_factory=security_handler_factory,
security=security,
security_schemes=SECURITY_DEFINITIONS_LOCAL)
verify_oauth.assert_has_calls([
mock.call(math.ceil, security_handler_factory.validate_scope, ['myscope']),
mock.call(math.ceil, security_handler_factory.validate_scope, ['myscope2']),
])
``` |
{
"source": "jonasboman/webperf_core",
"score": 2
} |
#### File: webperf_core/tests/w3c_base.py
```python
import subprocess
import sys
import json
import shlex
import requests
import config
from tests.utils import *
# DEFAULTS
request_timeout = config.http_request_timeout
useragent = config.useragent
css_review_group_errors = config.css_review_group_errors
review_show_improvements_only = config.review_show_improvements_only
w3c_use_website = config.w3c_use_website
def get_errors(test_type, headers, params, data=None):
if w3c_use_website:
return get_errors_from_service(test_type, headers, params, data)
else:
return get_errors_from_npm(test_type, params, data)
def get_errors_from_npm(test_type, params, data=None):
url = ''
arg = ''
test_arg = ''
errors = list()
if 'css' in params or test_type == 'css':
test_arg = ' --css --skip-non-css'
if 'html' in params or test_type == 'html':
test_arg = ' --html --skip-non-html'
if 'doc' in params:
url = params['doc']
if 'https://' not in url and 'http://' not in url:
raise Exception(
'Tested url must start with \'https://\' or \'http://\': {0}'.format(url))
arg = '--exit-zero-always{1} --stdout --format json --errors-only {0}'.format(
url, test_arg)
else:
arg = '--exit-zero-always{1} --stdout --format json --errors-only \'{0}\''.format(
data, test_arg)
    bashCommand = "java -jar vnu.jar {0}".format(arg)
    # shlex.split preserves the quoting around inline data; a plain
    # str.split() would break documents containing whitespace.
    process = subprocess.Popen(shlex.split(bashCommand), stdout=subprocess.PIPE)
output, error = process.communicate()
# print('output', output)
# print('error', error)
json_result = json.loads(output)
if 'messages' in json_result:
errors = json_result['messages']
return errors
def get_errors_from_service(test_type, headers, params, data=None):
errors = list()
try:
service_url = 'https://validator.w3.org/nu/'
if data == None:
request = requests.get(service_url, allow_redirects=True,
headers=headers,
timeout=request_timeout * 2,
params=params)
else:
request = requests.post(service_url, allow_redirects=True,
headers=headers,
timeout=request_timeout,
params=params,
data=data)
# get JSON
response = json.loads(request.text)
if 'messages' in response:
errors = response['messages']
return errors
    except requests.Timeout:
        # The specific handler must precede the generic one; after a bare
        # `except Exception` it would be unreachable.
        print('Timeout!\nMessage:\n{0}'.format(sys.exc_info()[0]))
        return errors
    except Exception:
        print('Unknown Error!\nMessage:\n{0}'.format(sys.exc_info()[0]))
        return errors
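# Minimal usage sketch (parameter values are assumptions; vnu.jar must be
# available in the working directory when w3c_use_website is False):
#
#   headers = {'user-agent': useragent}
#   params = {'doc': 'https://example.com/', 'out': 'json', 'level': 'error'}
#   errors = get_errors('html', headers, params)
#   print(len(errors), 'validation errors found')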
``` |
{
"source": "jonasborges/kandelero",
"score": 3
} |
#### File: kandelero/kandelero/utils.py
```python
import os
from datetime import datetime
from typing import Union
import pandas as pd
def parse_date(value: Union[str, datetime, None]) -> Union[datetime, None]:
if isinstance(value, datetime):
return value
elif isinstance(value, str):
return datetime.fromisoformat(value)
elif value is None:
return None
else:
raise ValueError(f"Impossible to parse date from value: {value}")
def get_filename(symbol: str, timeframe: str):
    base_dir = os.getenv("DATADIR")
    if base_dir is None:
        raise EnvironmentError("DATADIR environment variable is not set")
    filename = f"{symbol}-{timeframe}.csv"
    return os.path.join(base_dir, filename)
def get_df(
symbol: str, timeframe: str, date_filter: Union[datetime, str, None] = None
) -> pd.DataFrame:
df = pd.read_csv(get_filename(symbol=symbol, timeframe=timeframe))
if date_filter:
date = parse_date(date_filter)
df.date = pd.to_datetime(df.date)
df = df[df.date >= date]
return df
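# Usage sketch (assumes DATADIR points at a folder containing a file such as
# "EURUSD-M15.csv" with a parseable "date" column; names are illustrative):
#
#   df = get_df("EURUSD", "M15", date_filter="2021-11-01")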
```
#### File: kandelero/tests/conftest.py
```python
import logging
import os
import pytest
from hypothesis import HealthCheck, assume, settings
from hypothesis import strategies as st
from kandelero.calculations import DECIMAL_PLACES, MAX_VALUE, MIN_VALUE
settings.register_profile(
"ci",
max_examples=100,
suppress_health_check=(
HealthCheck.filter_too_much,
HealthCheck.too_slow,
),
)
settings.register_profile(
"default",
max_examples=50,
suppress_health_check=(
HealthCheck.filter_too_much,
HealthCheck.too_slow,
),
)
settings.register_profile(
"dev",
max_examples=10,
suppress_health_check=(
HealthCheck.filter_too_much,
HealthCheck.too_slow,
),
)
hypothesis_profile = os.getenv("HYPOTHESIS_PROFILE", "default")
settings.load_profile(hypothesis_profile)
def pytest_report_header(config, startdir):
return [f"HYPOTHESIS_PROFILE: {hypothesis_profile}"]
```
#### File: kandelero/tests/test_is_trap.py
```python
import pytest
from kandelero import Candlestick
from kandelero.context import Bottom, MarketContext, TimeFrame, Top
from kandelero.patterns.comparators import is_bear_trap, is_bull_trap
def bottoms():
return
def test_is_bull_trap():
# EURUSD - FIFTEEN_MINUTES
previous = Candlestick(
open=1.13737,
high=1.13825,
low=1.13730,
close=1.13781,
timestamp="2021-11-30T14:45:00",
)
current = Candlestick(
open=1.13778,
high=1.13825,
low=1.13658,
close=1.13722,
timestamp="2021-11-30T15:00:00",
)
market_context = MarketContext(
symbol="EURUSD",
tops=[
Top(
value=1.13737,
value_range=(),
timeframe=TimeFrame.FIFTEEN_MINUTES,
candlestick=Candlestick(
open=1.13695,
high=1.13737,
low=1.13673,
close=1.13685,
timestamp="2021-11-18T18:00:00",
),
),
],
bottoms=[],
)
assert is_bull_trap(
previous=previous, current=current, market_context=market_context
)
def test_is_bear_trap():
# EURGBP - ONE_MINUTE
previous = Candlestick(
open=0.84984,
high=0.84987,
low=0.84979,
close=0.84982,
timestamp="2021-12-01T07:40:00",
)
current = Candlestick(
open=0.84982,
high=0.84990,
low=0.84981,
close=0.84987,
timestamp="2021-12-01T07:41:00",
)
market_context = MarketContext(
symbol="EURGBP",
tops=[],
bottoms=[
Bottom(
value=0.84981,
value_range=(),
timeframe=TimeFrame.FIFTEEN_MINUTES,
candlestick=Candlestick(
open=0.84992,
high=0.85112,
low=0.84981,
close=0.85109,
timestamp="2021-11-30T10:30:00",
),
),
],
)
assert is_bear_trap(
previous=previous, current=current, market_context=market_context
)
```
#### File: kandelero/tests/test_pattern_finder.py
```python
from decimal import Decimal
from unittest import mock
import pytest
from kandelero.candlestick import Candlestick
from kandelero.patterns import COMPARATORS, find_patterns
from kandelero.patterns.comparators import ComparatorResponse
@pytest.fixture
def false_comparators():
return [
mock.Mock(return_value=ComparatorResponse(found=False, pattern=None))
for __ in range(10)
]
@pytest.fixture
def impossible_value():
# candlesticks cannot have negative values
return Decimal("-99999")
@pytest.fixture
def dummy_candlestick(impossible_value):
return Candlestick(
high=impossible_value,
low=impossible_value,
open=impossible_value,
close=impossible_value,
)
def test_pattern_finder_no_pattern(dummy_candlestick):
current = previous = dummy_candlestick
result = find_patterns(comparators=COMPARATORS, previous=previous, current=current)
assert list(result) == []
def test_pattern_finder_at_least_one_match(false_comparators, dummy_candlestick):
current = previous = dummy_candlestick
always_true_comparator = mock.Mock(
return_value=ComparatorResponse(found=True, pattern=None)
)
result = find_patterns(
comparators=[always_true_comparator, *false_comparators],
previous=previous,
current=current,
)
result = list(result)
assert len(result) == 1
```
#### File: kandelero/tests/test_pattern_harami.py
```python
from decimal import Decimal
import pytest
from kandelero import Candlestick
from kandelero.patterns.comparators import (
is_bearish_harami,
is_bearish_harami_cross,
is_bullish_harami,
is_bullish_harami_cross,
)
@pytest.mark.bearish_pattern
@pytest.mark.happy_path
@pytest.mark.parametrize(
"previous, current",
(
[
Candlestick(
open=Decimal("14501.55000"),
high=Decimal("14517.15000"),
low=Decimal("14492.15000"),
close=Decimal("14517.15000"),
),
Candlestick(
open=Decimal("14513.95000"),
high=Decimal("14516.95000"),
low=Decimal("14507.45000"),
close=Decimal("14512.95000"),
),
],
),
ids=[
"Nasdaq 1 Minute: 2021-07-19 15:28 -> 15:29",
],
)
def test_is_bearish_harami(previous, current):
assert previous.is_bullish
assert current.is_bearish
assert is_bearish_harami(previous, current)
@pytest.mark.bullish_pattern
@pytest.mark.happy_path
@pytest.mark.parametrize(
"previous, current",
(
[
Candlestick(
open=Decimal("14498.25000"),
high=Decimal("14499.55000"),
low=Decimal("14487.05000"),
close=Decimal("14490.35000"),
),
Candlestick(
open=Decimal("14491.05000"),
high=Decimal("14496.85000"),
low=Decimal("14485.25000"),
close=Decimal("14491.45000"),
),
],
[
Candlestick(
open=Decimal("14503.15000"),
high=Decimal("14503.85000"),
low=Decimal("14493.95000"),
close=Decimal("14495.35000"),
),
Candlestick(
open=Decimal("14497.45000"),
high=Decimal("14506.45000"),
low=Decimal("14496.35000"),
close=Decimal("14499.45000"),
),
],
),
ids=[
"Nasdaq 1 Minute: 2021-07-19 19:59 -> 20:00",
"Nasdaq 1 Minute: 2021-07-19 20:49 -> 20:50",
],
)
def test_is_bullish_harami(previous, current):
assert previous.is_bearish
assert current.is_bullish
result = is_bullish_harami(previous, current)
assert result
@pytest.mark.bullish_pattern
@pytest.mark.happy_path
@pytest.mark.parametrize(
"previous, current",
(
[
Candlestick(
open=Decimal("14676.75000"),
high=Decimal("14680.15000"),
low=Decimal("14673.15000"),
close=Decimal("14673.85000"),
),
Candlestick(
open=Decimal("14673.85000"),
high=Decimal("14675.25000"),
low=Decimal("14667.35000"),
close=Decimal("14673.85000"),
),
],
),
ids=[
"Nasdaq 1 Minute: 2021-07-20 16:31 -> 16:32",
],
)
def test_is_bullish_harami_cross(previous, current):
assert previous.is_bearish
assert not current.is_bearish and not current.is_bullish
assert is_bullish_harami_cross(previous, current)
@pytest.mark.bearish_pattern
@pytest.mark.happy_path
@pytest.mark.parametrize(
"previous, current",
(
[
Candlestick(
open=Decimal("14552.15000"),
high=Decimal("14576.25000"),
low=Decimal("14552.15000"),
close=Decimal("14574.15000"),
),
# that's doji (neither bullish nor bearish by itself)
# but it will have a bearish connotation in this context
Candlestick(
open=Decimal("14570.45000"),
high=Decimal("14576.25000"),
low=Decimal("14559.45000"),
close=Decimal("14570.45000"),
),
],
[
Candlestick(
open=Decimal("14747.05000"),
high=Decimal("14750.75000"),
low=Decimal("14746.55000"),
close=Decimal("14750.75000"),
),
Candlestick( # this one is perfect
open=Decimal("14750.55000"),
high=Decimal("14751.85000"),
low=Decimal("14749.05000"),
close=Decimal("14750.55000"),
),
],
),
ids=[
"Nasdaq 1 Minute: 2021-07-19 14:44 -> 14:45",
"Nasdaq 1 Minute: 2021-07-20 17:58 -> 17:59",
],
)
def test_is_bearish_harami_cross(previous, current):
assert previous.is_bullish
assert not current.is_bearish and not current.is_bullish
assert is_bearish_harami_cross(previous, current)
``` |
{
"source": "jonasborges/ltd-invoice",
"score": 2
} |
#### File: ltd-invoice/ltd_invoice/gmail.py
```python
import base64
import logging
import os
from dataclasses import dataclass, field
from datetime import datetime
from functools import lru_cache
from typing import Any, Dict, Generator, Union
import dateutil.parser
import dateutil.tz
from google.auth.exceptions import RefreshError
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import Resource, build
@dataclass(frozen=True)
class EmailMessage:
id: str
thread_id: str
subject: str
body: str = field(repr=False)
attachment: bytes = field(repr=False)
sender: str
receiver: str
date: datetime
@staticmethod
def parse_date(value):
if "BST" in value:
BST = dateutil.tz.gettz("Europe/London")
return dateutil.parser.parse(value, tzinfos={"BST": BST})
else:
return datetime.strptime(value, os.environ["EMAIL_DATE_FORMAT"])
    def __post_init__(self) -> None:
        # Frozen dataclass: bypass the frozen __setattr__ to coerce the raw
        # date header into a datetime.
        super().__setattr__("date", self.parse_date(self.date))
class GmailService:
def __init__(self) -> None:
logging.info("Creating GmailService")
self.service = self.build()
self.from_email_filter = os.environ["FROM_EMAIL_FILTER"]
logging.info("GmailService created!")
@staticmethod
def build_query(query_data) -> str:
return " ".join(
f"{field}:{value}"
for field, value in query_data.items()
if value is not None
)
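    # Illustrative example: build_query({"after": "2021-01-01",
    # "from": "billing@example.com", "subject": None}) yields
    # 'after:2021-01-01 from:billing@example.com' -- None values are
    # skipped, matching Gmail's search-operator syntax.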
def get_raw_messages(
self, last_email_processed: Union[datetime, None]
) -> Generator[Dict[str, Any], None, None]:
try:
after_date = last_email_processed.date().isoformat()
except AttributeError:
after_date = None
query_data = {
"after": after_date,
"from": os.environ["FROM_EMAIL_FILTER"],
}
message_response = (
self.service.users()
.messages()
.list(
userId=os.environ["GMAIL_USER_ID"],
labelIds=[os.environ["GMAIL_LABEL_4_INVOICES"]],
q=self.build_query(query_data),
)
.execute()
)
for msg in message_response.get("messages", []):
raw_message = (
self.service.users()
.messages()
.get(
id=msg["id"],
userId=os.environ["GMAIL_USER_ID"],
)
.execute()
)
if (
os.environ["MESSAGE_SNIPPET_FILTER"]
not in raw_message["snippet"]
):
continue
yield raw_message
def get_attachment(self, attachment_id: str, message_id: str) -> bytes:
raw_attachment = (
self.service.users()
.messages()
.attachments()
.get(
userId=os.environ["GMAIL_USER_ID"],
messageId=message_id,
id=attachment_id,
)
.execute()
)
return base64.urlsafe_b64decode(raw_attachment["data"])
def get_emails(
self, last_email_processed_date
) -> Generator[EmailMessage, None, None]:
logging.info("Retrieving emails since %s", last_email_processed_date)
yield from sorted(
(
EmailMessage(
body=raw_message["payload"]["body"],
id=raw_message["id"],
thread_id=raw_message["threadId"],
attachment=self.get_attachment(
attachment_id=self.get_attachment_id(raw_message),
message_id=raw_message["id"],
),
subject=self.get_attribute_from_header(
attribute="subject", message=raw_message
),
sender=self.get_attribute_from_header(
attribute="from", message=raw_message
),
receiver=self.get_attribute_from_header(
attribute="to", message=raw_message
),
date=self.get_attribute_from_header(
attribute="date", message=raw_message
),
)
for raw_message in self.get_raw_messages(
last_email_processed_date
)
),
key=lambda x: x.date,
)
@staticmethod
def get_attribute_from_header(
attribute: str, message: Dict[str, Any]
) -> str:
attr = attribute.lower()
for header in message["payload"]["headers"]:
if header["name"].lower() == attr:
return str(header["value"])
raise ValueError(f"{attribute} is not present")
@staticmethod
def get_attachment_id(message: Dict[str, Any]) -> str:
for part in message["payload"]["parts"]:
if part["mimeType"] == "application/pdf":
return str(part["body"]["attachmentId"])
raise ValueError("Message has no pdf attached")
@classmethod
@lru_cache(maxsize=1)
def build(cls) -> Resource:
# Call the Gmail API
return build("gmail", "v1", credentials=cls.get_credentials())
@classmethod
def get_credentials(cls):
# If modifying these scopes, delete the file token.json.
scopes = [os.environ["GOOGLE_API_SCOPE"]]
creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists("token.json"):
creds = Credentials.from_authorized_user_file("token.json", scopes)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds = cls.refresh_creds(creds, scopes)
else:
creds = cls.generate_creds(scopes)
return creds
@classmethod
def refresh_creds(cls, creds, scopes):
try:
creds.refresh(Request())
except RefreshError:
os.unlink("token.json")
creds = cls.generate_creds(scopes)
return creds
@classmethod
def generate_creds(cls, scopes):
flow = InstalledAppFlow.from_client_secrets_file(
"credentials.json", scopes
)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open("token.json", "w") as token:
token.write(creds.to_json())
return creds
``` |
{
"source": "JonasBrusokas/ModelarDB-ext",
"score": 3
} |
#### File: ModelarDB-ext/forecasting/forecasting.py
```python
import pandas as pd
from dataset import *
from models import *
from utils import *
from sklearn.metrics import mean_squared_error
if __name__ == '__main__':
    def test_model(model, dl, scaler: StandardScaler, metric=root_mean_squared_error()):
        # Note: `metric` is accepted but unused; per-batch RMSE is computed
        # with sklearn's mean_squared_error below.
        list_test_rmse = []
        list_ys = []
        for x, y in dl:
            y_hat_proper = scaler.inverse_transform(model(x).detach())
            y_proper = scaler.inverse_transform(y)
            # Per-batch RMSE normalised by batch size; fall back to 1 for empty batches.
            list_test_rmse.append(
                np.sqrt(mean_squared_error(y_hat_proper, y_proper)) / len(y_proper)
                if len(y_proper) != 0 else 1
            )
list_ys.append( (y_proper, y_hat_proper) )
return float(np.mean(np.stack(list_test_rmse))), list_ys
def train_model(df: pd.DataFrame,
model: nn.Module,
memory: int,
batch_size: int,
error_bound: int,
flatten_xs: bool,
output_parent_folder: str,
output_name: str,
):
# df = df.head(100000) # TODO: REMOVE!
dm = DataModule(df,
memory=memory,
horizon=horizon,
batch_size=batch_size,
flatten_xs=flatten_xs,
error_bound=error_bound)
raw_dm = DataModule(df,
memory=memory,
horizon=horizon,
batch_size=batch_size,
flatten_xs=flatten_xs,
error_bound=None)
train_dataloader = dm.train_dataloader()
device = torch.device('cuda')
cpu_device = torch.device('cpu')
model = model.to(device)
loss_foo = root_mean_squared_error()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
scheduler_lr = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=lr_gamma)
###
# Training loop
before_training = DateUtils.now()
for epoch in range(0, epochs):
loss_list = []
for x, y in train_dataloader:
x = x.to(device)
y = y.to(device)
optimizer.zero_grad()
y_hat = model(x)
loss = loss_foo(y, y_hat)
loss_list.append(loss.cpu().detach().numpy())
loss.backward()
optimizer.step()
scheduler_lr.step()
epoch_loss = np.mean(np.stack(loss_list))
print(f"Loss at epoch={epoch+1}: {float(epoch_loss)}, took: {DateUtils.now() - before_training}")
model = model.to(cpu_device)
model.eval()
test_rmse, raw_test_rmse = -1.0, -1.0
for y_type in ["raw", "compressed"]:
if (y_type == "raw"):
test_rmse, list_ys = test_model(model, dm.test_dataloader(), scaler=dm.scaler)
else:
raw_test_rmse, list_ys = test_model(model, raw_dm.test_dataloader(), scaler=dm.scaler)
            # Each row stores y and y_hat side by side: 2 * horizon = 60 columns.
            total_test_ys = np.concatenate(list(map(lambda array: np.stack(array).reshape(-1, 2 * horizon), list_ys)), axis=0)
columns = [f"y_{h_i}" for h_i in range(horizon)] + [f"y_hat_{h_i}" for h_i in range(horizon)]
ys_df = pd.DataFrame(total_test_ys, columns=columns)
ys_output_file_path = f"{os.path.join(output_parent_folder, f'{output_name}_y_outputs_{y_type}.csv')}"
ys_df.to_csv(ys_output_file_path)
# print(f"Test RMSE: {test_rmse}")
return model, float(epoch_loss), float(test_rmse), float(raw_test_rmse)
horizon = 30
memory = 60
batch_size = 512
hidden_size = 16
epochs = 15
learning_rate = 0.005
lr_gamma = 0.9
before_everything = DateUtils.now()
output_super_parent_folder = f"{os.path.join(FileUtils.project_root_dir(), 'results', 'forecasting_results', f'{before_everything.month}{before_everything.day}')}"
output_parent_folder = f"{os.path.join(output_super_parent_folder, f'{before_everything.hour}-{before_everything.minute}')}"
output_csv_path = f"{os.path.join(output_parent_folder, f'output_{before_everything.month}-{before_everything.day}_{before_everything.hour}-{before_everything.minute}.csv')}"
FileUtils.create_dir(output_parent_folder)
# TODO: fill dynamically
parquet_file_paths = [
# f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'lg_only','house_1-channel_1_output_data_points.parquet')}",
# f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'lg_v3_d5', 'house_1-channel_1_output_data_points.parquet')}",
# f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'lg_v3_d10', 'house_1-channel_1_output_data_points.parquet')}",
# f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'lg_v3_d25', 'house_1-channel_1_output_data_points.parquet')}",
# f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'pmc_only', 'house_1-channel_1_output_data_points.parquet')}",
f"{os.path.join(FileUtils.project_root_dir(), 'data', 'REDD-Cleaned-f32', 'swing', 'house_1-channel_1_output_data_points.parquet')}",
]
error_bound_list = [0, 1, 2, 5, 10, 25, 50, None]
model_type_list = ['turbo_lstm']
hidden_size_list = [hidden_size]
# hidden_size_list = [4, 8, 16, 32, 48, 80] # TODO: remove for LR
total_run_count = len(error_bound_list) * len(model_type_list) * len(parquet_file_paths) * len(hidden_size_list)
current_run = 0
for parquet_path in parquet_file_paths:
df = pd.read_parquet(parquet_path)
for error_bound in error_bound_list:
for model_type in model_type_list:
for hidden_size in hidden_size_list:
current_run+=1
print(f"Current run: {current_run} / {total_run_count} | Date: {DateUtils.now()}")
# if (current_run <= 21):
# print("Skipping...")
# continue
if (model_type == 'lstm'):
model = BasicLSTM_simple(hidden_size=hidden_size,
output_length=horizon)
elif(model_type == 'lr'):
model = LinearRegression(memory, horizon)
elif(model_type == 'turbo_lstm'):
model = LSTM_with_skip(memory_length=memory,
hidden_size=hidden_size,
output_length=horizon)
else:
raise ValueError(f"Model type: '{model_type}' is unsupported!")
flatten_xs = True if model_type in ['lr'] else False
dataset_name = str(Path(parquet_path).parent.name)
before_training = DateUtils.now()
trained_model, train_rmse, rmse, raw_rmse = train_model(
df,
model=model,
memory=memory,
batch_size=batch_size,
error_bound=error_bound,
flatten_xs=flatten_xs,
output_parent_folder=output_parent_folder,
output_name=f"{model_type}_{dataset_name}_E{error_bound if not None else 'RAW'}"
)
output_dict = {
'dataset_name': dataset_name,
'model_type': model_type,
'error_bound': error_bound if (error_bound is not None) else -1,
'epochs': epochs,
'memory': memory,
'horizon': horizon,
'batch_size': batch_size,
'hidden_size': hidden_size if (model_type != 'lr') else -1,
'train_rmse': train_rmse,
'rmse': rmse,
'rmse_on_raw': raw_rmse,
'train_start': before_training,
'lr': learning_rate,
'lr_gamma': lr_gamma,
}
output_csv_df = None
try:
output_csv_df = pd.read_csv(output_csv_path)
current_output_df = pd.DataFrame(output_dict, index=[1])
output_csv_df = pd.concat([output_csv_df, current_output_df])
print("Concatenating")
except Exception as e:
print("Output file does not exist yet!")
output_csv_df = pd.DataFrame(output_dict, index=[1])
output_csv_df.to_csv(path_or_buf=output_csv_path, index=False, header=True)
print(f"Training and inference took: {DateUtils.now() - before_training}")
```
#### File: ModelarDB-ext/scripts/collect_compression_details.py
```python
import json
from pathlib import Path
import pandas as pd
def flatten(list_to_flatten: list) -> list:
return [item for sublist in list_to_flatten for item in sublist]
def natural_sort(list_to_sort: [str]) -> [str]:
""" Sort the given list of strings in the way that humans expect. """
import copy, re
copied_list = copy.deepcopy(list_to_sort)
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
copied_list.sort(key=alphanum_key)
return copied_list
def collect_to_csv(path_json:str):
json_path_obj = Path(path_json)
json_path_obj_name_without_suffix = (json_path_obj.name)[:-5]
print(f"JSON name: '{json_path_obj_name_without_suffix}'")
with open(path_json, "r") as read_file:
json_object = json.load(read_file)
keys = list(json_object.keys())
value_keys = list(filter(lambda key: key.startswith("value-") and not key.endswith("-R"), keys))
sorted_value_keys = natural_sort(value_keys)
# sorted_value_keys = list(map(lambda item: item[len("value-"):], sorted_value_keys))
result_dict_list = []
for key in sorted_value_keys:
result_dict = {}
selected_error_obj = json_object[key]
error_bound = int(key[len("value-E"):])
result_dict['name'] = f"{json_path_obj_name_without_suffix}"
result_dict['error_bound'] = error_bound
result_dict['parquet_size_raw'] = selected_error_obj['segments']['parquet']['None']
result_dict['parquet_size_gzip'] = selected_error_obj['segments']['parquet']['gzip']
result_dict['mean_absolute_error'] = selected_error_obj['metrics']['Mean_Absolute_Value']
result_dict['mean_absolute_percentage_error'] = selected_error_obj['metrics']['Mean_Absolute_Percentage_Error']
result_dict_list.append(result_dict)
global raw_error_obj
raw_error_obj = json_object['value-R']
raw_result_dict = {}
raw_result_dict['name'] = f"raw"
raw_result_dict['error_bound'] = 0
raw_result_dict['parquet_size_raw'] = raw_error_obj['data_points']['parquet']['None']
raw_result_dict['parquet_size_gzip'] = raw_error_obj['data_points']['parquet']['gzip']
raw_result_dict['mean_absolute_error'] = 0.0
raw_result_dict['mean_absolute_percentage_error'] = 0.0
return json_object, result_dict_list, raw_result_dict
#%%
if __name__ == '__main__':
import os
from utils import *
compression_details_path = f"{os.path.join(FileUtils.project_root_dir(), 'results', 'compression_details')}"
compression_detail_files = list(filter(lambda file_path: file_path.endswith(".json"), os.listdir(compression_details_path)))
result_dict_list = []
for file_path in compression_detail_files:
absolute_path = os.path.join(compression_details_path, file_path)
obj, result_dict, raw_result_dict = collect_to_csv(absolute_path)
result_dict_list.append(result_dict)
result_dict_list.append([raw_result_dict])
result_dict_list_flattened = flatten(result_dict_list)
result_df = pd.DataFrame(result_dict_list_flattened)
result_df.to_csv(
os.path.join(compression_details_path, "output.csv"),
index=False,
)
# json_path = "/Users/jonasb/repos/ModelarDB-ext/results/compression_details/lost_gorilla_v1.json"
# obj, result_dict = collect_to_csv(json_path)
``` |
{
"source": "jonasbusk/pdpiper",
"score": 3
} |
#### File: pdpiper/pdpiper/pipeline.py
```python
from .base import BaseStep, TransformerMixin
class Pipeline(BaseStep, TransformerMixin):
"""Pipeline of steps."""
    def __init__(self, steps=None):
        # Avoid a shared mutable default argument.
        self.steps = steps if steps is not None else []
def fit(self, df):
"""Fit each step in sequence."""
self.fit_transform(df)
return self
def get_params(self):
"""Get params of each step as list."""
return [s.get_params() for s in self.steps]
def set_params(self, params_list):
"""Set params of each step from list of params."""
assert len(self.steps) == len(params_list)
for step, params in zip(self.steps, params_list):
step.set_params(params)
def transform(self, df):
"""Apply each step in sequence."""
for step in self.steps:
df = step.transform(df)
return df
def fit_transform(self, df):
"""Fit and apply each step in sequence."""
for step in self.steps:
df = step.fit_transform(df)
return df
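# --- Minimal usage sketch (hedged): `DropNa` is a hypothetical step; any class
# implementing the BaseStep fit/transform interface from .base should work.
#
# import pandas as pd
#
# class DropNa(BaseStep, TransformerMixin):
#     def fit(self, df):
#         return self
#     def transform(self, df):
#         return df.dropna()
#
# pipe = Pipeline(steps=[DropNa()])
# clean = pipe.fit_transform(pd.DataFrame({"a": [1.0, None, 3.0]}))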
``` |
{
"source": "JonasCaetanoSz/Bot_dizu",
"score": 3
} |
#### File: JonasCaetanoSz/Bot_dizu/explore.py
```python
import os
import pickle
import time
from selenium.common.exceptions import WebDriverException
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
def explore(login):
if f'{login}.pkl' in os.listdir('Cache/Dizu'):
return True
def make_tasks(driver):
task_number = 0
    script_insta = open('Scripts/insta.js', 'r').read()
    advance = None
    while True:
        time.sleep(35)  # Interval between task attempts, in seconds.
        try:
            driver.execute_script('document.querySelector("#conectar_step_4 > p").click()')
            task_number = task_number + 1
            print(f'\n\033[1;92m[INFO] Performing task no. {task_number} now.')
            advance = True
        except WebDriverException as Sem_tarefas:
            print('\n\033[1;33m[INFO] No tasks right now, searching again...')
driver.execute_script("document.querySelector('#iniciarTarefas').click()")
advance = False
if advance:
driver.switch_to.window(driver.window_handles[1])
time.sleep(8.2)
driver.execute_script(script_insta)
time.sleep(11)
driver.close()
driver.switch_to.window(driver.window_handles[0])
time.sleep(1)
driver.execute_script('document.querySelector("#conectar_step_5 > button").click()')
def login(user,hidden_browser):
    if hidden_browser in ('s', 'S', 'sim'):
options = webdriver.ChromeOptions()
options.add_argument("--headless")
driver = webdriver.Chrome(ChromeDriverManager().install(), chrome_options=options)
else :
driver = webdriver.Chrome(ChromeDriverManager().install())
os.system('clear')
    print('\n[INFO] Logging in to dizu, please wait...')
cookie_dizu = pickle.load(open(f"Cache/Dizu/{user}.pkl", "rb"))
driver.get('https://dizu.com.br/login')
for cookie in cookie_dizu :
driver.add_cookie(cookie)
driver.execute_script('window.location.href = "https://dizu.com.br/painel/conectar" ')
if driver.current_url == 'https://dizu.com.br/painel/conectar':
os.system('clear')
driver.execute_script('scroll(0,780); document.body.style.zoom = 0.75')
        print('\033[1;92m[INFO] Login completed successfully\n')
return driver
else :
        print('[ERROR] This cookie is no longer valid.')
return driver
def save_cookie_dizu(driver,user):
pickle.dump(driver.get_cookies() , open(f'Cache/Dizu/{user}.pkl' , 'wb'))
def save_cookie_instagram(driver,user):
pickle.dump(driver.get_cookies() , open(f'Cache/Instagram/{user}.pkl' , 'wb'))
def login_in_insta(user,driver,selected):
if f'{user}.pkl' in os.listdir('Cache/Instagram'):
        print(f'\n[INFO] Logging in with profile {user}')
driver.execute_script('window.open("https://www.instagram.com/")')
driver.switch_to.window(driver.window_handles[1])
cookie_insta = pickle.load(open(f"Cache/Instagram/{user}.pkl", "rb"))
time.sleep(2)
for cookie in cookie_insta:
driver.add_cookie(cookie)
time.sleep(2)
driver.execute_script('window.location.href = "https://www.instagram.com" ')
time.sleep(1)
os.system('clear')
        print('\n\033[1;92m[INFO] Login completed, starting tasks!')
driver.execute_script('window.close()')
driver.switch_to.window(driver.window_handles[0])
driver.execute_script(f'document.querySelector("#instagram_id").selectedIndex = {selected}')
driver.maximize_window()
driver.execute_script("document.querySelector('#iniciarTarefas').click()")
make_tasks(driver)
else:
        password = input(f'\nEnter the password for {user}: ')
        os.system('clear')
        print('\n\033[1;92m[INFO] Logging in to Instagram..')
driver.execute_script('window.open("https://www.instagram.com/")')
driver.switch_to.window(driver.window_handles[1])
driver.maximize_window()
time.sleep(8)
driver.find_element_by_xpath('//*[@id="loginForm"]/div/div[1]/div/label/input').send_keys(user)
time.sleep(1)
driver.find_element_by_xpath('//*[@id="loginForm"]/div/div[2]/div/label/input').send_keys(password)
time.sleep(1)
driver.find_element_by_xpath('//*[@id="loginForm"]/div/div[3]/button/div').click()
time.sleep(5)
if driver.current_url == 'https://www.instagram.com/accounts/onetap/?next=%2F':
os.system('clear')
            print('\n[INFO] Login completed, starting tasks..')
save_cookie_instagram(driver,user)
driver.execute_script('window.close()')
driver.switch_to.window(driver.window_handles[0])
driver.execute_script(f'document.querySelector("#instagram_id").selectedIndex = {selected}')
driver.maximize_window()
driver.execute_script("document.querySelector('#iniciarTarefas').click()")
make_tasks(driver)
else :
os.system('clear')
driver.close()
driver.switch_to.window(driver.window_handles[0])
            print('\033[1;91m[ERROR] Invalid login or password, try again:\n\n\n')
login_in_insta(user,driver,selected)
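# --- Illustrative flow (hedged): how these helpers are expected to chain.
#
# user = 'some_dizu_account'
# if explore(user):                      # a cached dizu cookie exists
#     driver = login(user, 'n')          # 'n' = visible (non-headless) browser
#     login_in_insta('insta_user', driver, selected=1)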
``` |
{
"source": "Jonas-Carvalho/rnnt-speech-recognition",
"score": 3
} |
#### File: utils/data/common_voice.py
```python
from pydub import AudioSegment
import os
import tensorflow as tf
from . import common as cm_data_utils
def mp3_to_wav(filepath):
audio_segment = AudioSegment.from_mp3(filepath)
audio_segment.export('{}.wav'.format(filepath[:-4]), format='wav')
os.remove(filepath)
def tf_file_exists(filepath):
return tf.py_function(lambda x: os.path.exists(x.numpy().decode('utf8')), inp=[filepath], Tout=tf.bool)
def tf_parse_line(line, datapath):
line_sections = tf.strings.split(line, '\t')
audio_fn = line_sections[1]
transcription = line_sections[2]
audio_filepath = tf.strings.join([datapath, 'clips', audio_fn], '/')
if tf.strings.regex_full_match(audio_fn, '(.*)\\.mp3'):
wav_filepath = tf.strings.substr(audio_filepath, 0, tf.strings.length(audio_filepath) - 4) + '.wav'
if tf.logical_not(tf_file_exists(wav_filepath)):
tf.py_function(lambda x: mp3_to_wav(x.numpy().decode('utf8')),
inp=[audio_filepath], Tout=[])
audio_filepath = wav_filepath
audio, sr = cm_data_utils.tf_load_audio(audio_filepath)
return audio, sr, transcription
def _create_dataset(path, name, max_data=None):
dataset = tf.data.TextLineDataset(
[os.path.join(path, '{}.tsv'.format(name))])
dataset = dataset.skip(1)
dataset = dataset.map(lambda line: tf_parse_line(line, path),
num_parallel_calls=tf.data.experimental.AUTOTUNE)
if max_data is not None:
dataset = dataset.take(max_data)
return dataset
def create_datasets(path, max_data=None):
train_dataset = _create_dataset(path, 'train',
max_data=max_data)
dev_dataset = _create_dataset(path, 'dev',
max_data=max_data)
return train_dataset, dev_dataset
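# --- Illustrative usage (hedged): `path` is assumed to be a Common Voice
# release directory containing train.tsv / dev.tsv and a clips/ folder.
#
# train_ds, dev_ds = create_datasets('/data/common_voice/en', max_data=1000)
# for audio, sr, transcription in train_ds.take(1):
#     print(int(sr), transcription.numpy())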
```
#### File: Jonas-Carvalho/rnnt-speech-recognition/utils.py
```python
BLANK_LABEL = '<blank>'
SOS_LABEL = '<sos>'
def init_vocab():
"""Initialize text to ids vocabulary
Returns:
Vocabulary
"""
alphabet = 'abcdefghijklmnopqrstuvwxyz '
specials = [BLANK_LABEL, SOS_LABEL]
return {c: i for c, i in zip(specials + [c for c in alphabet],
range(len(alphabet) + len(specials)))}
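# Example: the special symbols come first, then the alphabet, i.e.
# init_vocab() == {'<blank>': 0, '<sos>': 1, 'a': 2, 'b': 3, ..., 'z': 27, ' ': 28}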
``` |
{
"source": "jonascheng/services-lib",
"score": 2
} |
#### File: services-lib/soocii_services_lib/click.py
```python
import calendar
import configparser
import datetime
import logging
import os
import platform
import subprocess
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from io import BytesIO
import boto3
import click
from fabric.api import *
logger = logging.getLogger()
log_formatter = logging.Formatter(
'[%(asctime)-15s][%(levelname)-5.5s][%(filename)s][%(funcName)s#%(lineno)d] %(message)s'
)
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)
def _build_cmd_by_platform(cmd):
if 'darwin' in platform.platform().lower(): # Mac
return cmd
elif os.geteuid() == 0: # root user
return cmd
else:
return 'sudo ' + cmd
def bash(cmd):
""" Run bash with colorized and use `sudo` based on platform.
Run without sudo on Mac; Otherwise, run with sudo.
:param cmd: commands to execute in bash.
:return:
"""
cmd = _build_cmd_by_platform(cmd)
click.echo(click.style('Run ', fg='green', bold=True) + click.style(cmd))
return subprocess.call(cmd, shell=True)
def build_soocii_cli(ci_tools):
"""Build click commands group for CI
This function will build a click commands group. This click command group will run commands by implementations in
`ci_tools`.
If you need to customized some CI commands, you can override methods in CiTools.
:param ci_tools: A instance of CiTools.
:return: A Click command group with commands which are needed by CI.
.. seealso:: :class:`click.CiToolsAbc`
.. seealso:: :class:`click.CiTools`
"""
if not isinstance(ci_tools, CiToolsAbc):
raise ValueError(
"type({}) is not CiToolsAbc. You need to use `CiTools` or implement `CiToolsAbc`.".format(ci_tools)
)
@click.group(chain=True)
def soocii_cli():
pass
@soocii_cli.command('docker-login', short_help='Let docker client login to ECR')
def docker_login():
ci_tools.docker_login()
@soocii_cli.command('build', short_help='Build web docker image on local')
def build():
"""
Build web docker image on local
"""
ci_tools.build()
@soocii_cli.command('build-and-push', short_help='Build and push web docker image to private registry')
def build_and_push():
"""
Build and push web docker image to private registry
"""
ci_tools.build_and_push()
@soocii_cli.command('deploy-to-integ', short_help='Deployment to integration server.')
def deploy_to_integration():
ci_tools.deploy_to_integ()
return soocii_cli
class CiToolsAbc(metaclass=ABCMeta):
"""An interface of CiTool
You can inherit this interface to implement your own CiTool and build click command group by passing your CiTool to
:func:`~click.build_soocii_cli`
"""
@abstractmethod
def docker_login(self):
pass
@abstractmethod
def build(self):
pass
@abstractmethod
def build_and_push(self):
pass
@abstractmethod
def deploy_to_integ(self):
pass
class CiTools(CiToolsAbc):
"""Implementation of click commands
This :class:`click.CiTools` collect some simple implementations which needed by Jenkins for CI/CD.
"""
def __init__(self, repo, aws_account='710026814108', aws_region='ap-northeast-1'):
"""Some common functions which may be used by soocii.py script.
:param repo: Repository name.
:param aws_account: AWS account ID.
:param aws_region: AWS region.
"""
self.repo = repo
self.aws = namedtuple('Aws', ['account', 'region'])(aws_account, aws_region)
def docker_login(self):
"""
Login AWS ECR
"""
success, resp = self._get_ecr_login()
if not success:
logging.error("fail to login docker.")
exit("fail to login docker.")
bash(resp)
def build(self):
"""
Build docker image on local
"""
version, label = self._get_docker_ver_label()
self.build_docker_image(version, label)
logger.info('Build image version %s with label %s done', version, label)
return version, label
def build_and_push(self):
"""
Build and push docker image to private registry on AWS ECR
"""
success, response = self._get_ecr_login()
if not success:
exit()
bash(response)
version, label = self._get_docker_ver_label()
self.build_docker_image(version, label)
self.push_docker_image(label)
logger.info('Build and push image version %s with label %s to registry done', version, label)
def deploy_to_integ(self):
"""
Deploy docker to integration server
"""
ip, key = self._get_integ_server_info()
with settings(host_string=ip, user='ubuntu', key=key):
with cd('/home/ubuntu/iron'):
run('bash -c "./deploy.py update {}"'.format(self.repo))
logger.info('Deploy done.')
@staticmethod
def _get_integ_server_info():
f_obj = BytesIO() # we don't want to store secret data on disk
s3 = boto3.resource('s3')
obj = s3.Object('soocii-secret-config-tokyo', 'integ_conn_info')
obj.download_fileobj(f_obj)
f_obj.seek(0)
config = configparser.ConfigParser()
config.read_string(f_obj.getvalue().decode('utf-8'))
f_obj.close()
return config.get('DEFAULT', 'IP'), config.get('DEFAULT', 'SSH_KEY')
@staticmethod
def _get_timestamp():
"""
Get timestamp from current UTC time
"""
utc_time = datetime.datetime.utcnow()
return calendar.timegm(utc_time.timetuple())
@staticmethod
def _get_docker_ver_label():
build_number_from_jenkins = os.getenv('BUILD_NUMBER', False)
git_branch = os.getenv('GIT_BRANCH', None)
if git_branch is not None:
git_branch = git_branch.split('/')[1]
if git_branch is not None and 'develop' in git_branch:
build_number_from_jenkins = False
git_branch = None
logger.info('Current branch is %s', git_branch)
if not build_number_from_jenkins:
version = '%s' % CiTools._get_timestamp()
else:
version = build_number_from_jenkins
if git_branch is None:
label = 'integ_latest'
else:
label = '%s_%s' % (git_branch, version)
return version, label
def _get_ecr_login(self):
"""
Get docker login user, and password from aws cli
"""
cmd = 'aws ecr get-login --no-include-email --region %s' % self.aws.region
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True)
response = ''
success = True
for line in p.stdout:
response += line.decode('utf-8')
for line in p.stderr:
print('Can not get docker login information : %s' %
line.decode('utf-8'))
success = False
p.wait()
return success, response
def build_docker_image(self, version, label):
"""
Build docker image
"""
image_name = '%s.dkr.ecr.%s.amazonaws.com/%s:%s' % (
self.aws.account, self.aws.region, self.repo, version)
bash('docker build --build-arg version=%s -t %s .' % (version, image_name))
image_with_label = '%s.dkr.ecr.%s.amazonaws.com/%s:%s' % (
self.aws.account, self.aws.region, self.repo, label)
bash('docker tag %s %s' % (image_name, image_with_label))
def push_docker_image(self, label):
"""
Push docker image with latest label to AWS ECR registry
"""
aws_registry_repo = '%s.dkr.ecr.%s.amazonaws.com/%s:%s' % (
self.aws.account, self.aws.region, self.repo, label)
bash('docker push %s' % aws_registry_repo)
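# --- Illustrative wiring (hedged): how a soocii.py-style script is expected to
# assemble its CLI from these pieces.
#
# tools = CiTools(repo='my-service')
# cli = build_soocii_cli(tools)
#
# if __name__ == '__main__':
#     cli()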
```
#### File: services-lib/soocii_services_lib/tokens.py
```python
import binascii
import json
import time
import jsonschema
from .crypter import AESCipher
from .exceptions import AccessTokenValidationError, RefreshTokenValidationError, TokenExpiredError, TokenSchemaError
class BaseToken(dict):
_schema = {}
def is_valid(self, age=None, raise_exception=False):
try:
jsonschema.validate(self, self._schema)
if age and ('timestamp' not in self or self['timestamp'] + age < int(time.time())):
msg = 'timestamp {} is expired'.format(self.get("timestamp"))
raise TokenExpiredError(msg)
except jsonschema.exceptions.ValidationError as e:
if raise_exception:
raise TokenSchemaError(str(e))
except TokenExpiredError:
if raise_exception:
raise
else:
return True
return False
class AccessToken(BaseToken):
ROLE_USER = 'user'
ROLE_BACKSTAGE = 'backstage'
ROLE_SERVICE = 'service'
_schema = {
'definitions': {
'basic': {
'type': 'object',
'properties': {
'timestamp': {
'type': 'integer'
}
}
},
ROLE_USER: {
'type': 'object',
'properties': {
'role': {
'type': 'string',
'enum': [ROLE_USER]
},
'pid': {
'type': 'string'
},
'id': {
'type': 'integer'
},
'soocii_id': {
'type': 'string'
},
'uid': {
'type': 'string',
'pattern': '^[0-9a-fA-F]{32}$'
}
},
'required': ['pid', 'id', 'soocii_id', 'uid']
},
ROLE_BACKSTAGE: {
'type': 'object',
'properties': {
'role': {
'type': 'string',
'enum': [ROLE_BACKSTAGE]
},
'id': {
'type': 'integer'
}
},
'required': ['id']
},
ROLE_SERVICE: {
'type': 'object',
'properties': {
'role': {
'type': 'string',
'enum': [ROLE_SERVICE]
},
'name': {
'type': 'string'
}
},
'required': ['name']
},
},
'allOf': [
{
                '$ref': '#/definitions/basic'
},
{
'oneOf': [
{
'$ref': '#/definitions/user'
}, {
'$ref': '#/definitions/backstage'
}, {
'$ref': '#/definitions/service'
}
]
}
],
'required': ['role', 'timestamp']
}
@property
def role(self):
return self.get('role')
def is_role(self, role):
return self.role == role
class RefreshToken(BaseToken):
_schema = {
'type': 'object',
'properties': {
'timestamp': {
'type': 'integer'
},
'access_token': {
'type': 'string'
}
},
'required': ['timestamp', 'access_token']
}
class AccessTokenCryper(object):
age = 43200
exception = AccessTokenValidationError
_token_cls = AccessToken
def __init__(self, key, age=None):
key = binascii.unhexlify(key)
self.cipher = AESCipher(key)
if age:
self.age = age
def _encode(self, raw):
if isinstance(raw, str):
raw = raw.encode('utf-8')
return self.cipher.encrypt(raw)
def _decode(self, data):
# convert the pre-defined secret from hex string.
if isinstance(data, str):
data = data.encode('utf-8')
return self.cipher.decrypt(data)
def dumps(self, data=None, **kwargs):
"""
Generate token from encrypting the given data and keyword arguments. data should be a dict
"""
if not isinstance(data, dict):
data = {}
data.update(kwargs)
# append timestamp
data.update(timestamp=int(time.time()))
token = self._token_cls(data)
token.is_valid(raise_exception=True)
return self._encode(json.dumps(token))
def loads(self, token, valid_age=True):
"""
Load and decrypt token
"""
try:
token = self._token_cls(json.loads(self._decode(token).decode('utf-8')))
token.is_valid(self.age if valid_age else None, raise_exception=True)
except ValueError:
raise self.exception('invalid token format')
return token
def _get_specific_token(role):
def _wrapper(self, **kwargs):
mandatory_keys = self._token_cls._schema['definitions'][role]['required']
if any(k not in kwargs for k in mandatory_keys):
msg = '{} are required'.format(', '.join(mandatory_keys))
raise TokenSchemaError(msg)
kwargs['role'] = role
return self.dumps(kwargs).decode('utf-8')
return _wrapper
_get_user_token = _get_specific_token(_token_cls.ROLE_USER)
get_backstage_token = _get_specific_token(_token_cls.ROLE_BACKSTAGE)
get_service_token = _get_specific_token(_token_cls.ROLE_SERVICE)
def get_user_token(self, **kwargs):
if 'lang' not in kwargs:
kwargs['lang'] = 'EN-US'
return self._get_user_token(**kwargs)
class RefreshTokenCryper(AccessTokenCryper):
age = 604800
exception = RefreshTokenValidationError
_token_cls = RefreshToken
def get_token(self, access_token):
return self.dumps({'access_token': access_token}).decode('utf-8')
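# --- Illustrative usage (hedged): the key is assumed to be a hex string that
# binascii.unhexlify can decode, e.g. 32 hex chars for a 16-byte AES key.
#
# key = '00112233445566778899aabbccddeeff'
# access_cryper = AccessTokenCryper(key)
# refresh_cryper = RefreshTokenCryper(key)
#
# token = access_cryper.get_service_token(name='jobs')   # encrypted str
# payload = access_cryper.loads(token)                   # -> AccessToken dict
# refresh_token = refresh_cryper.get_token(token)        # wraps the access token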
```
#### File: services-lib/soocii_services_lib/util.py
```python
import urllib.request
from time import sleep
def wait_for_internet_connection(host, port):
"""Wait for target until target is able to be connected"""
target = host + ':' + str(port)
print("Wait for {} ready".format(target))
count = 0
while True:
try:
urllib.request.urlopen('http://' + target, timeout=1)
return target
except Exception as e:
dots = ''
for i in range(3):
dots += '.' if i <= count % 3 else ' '
print("Still waiting for {} {}".format(target, dots), end='\r')
count += 1
            if isinstance(e, urllib.request.HTTPError):
return target
sleep(1)
pass
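# --- Example (hedged): block until a local web service answers HTTP.
#
# target = wait_for_internet_connection('localhost', 8000)
# print('{} is reachable'.format(target))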
``` |
{
"source": "jonaschn/networkx",
"score": 4
} |
#### File: networkx/algorithms/clique.py
```python
from collections import deque
from itertools import chain
from itertools import combinations
from itertools import islice
import networkx as nx
from networkx.utils import not_implemented_for
__all__ = [
"find_cliques",
"find_cliques_recursive",
"make_max_clique_graph",
"make_clique_bipartite",
"graph_clique_number",
"graph_number_of_cliques",
"node_clique_number",
"number_of_cliques",
"cliques_containing_node",
"enumerate_all_cliques",
"max_weight_clique",
]
@not_implemented_for("directed")
def enumerate_all_cliques(G):
"""Returns all cliques in an undirected graph.
This function returns an iterator over cliques, each of which is a
list of nodes. The iteration is ordered by cardinality of the
cliques: first all cliques of size one, then all cliques of size
two, etc.
Parameters
----------
G : NetworkX graph
An undirected graph.
Returns
-------
iterator
An iterator over cliques, each of which is a list of nodes in
`G`. The cliques are ordered according to size.
Notes
-----
To obtain a list of all cliques, use
`list(enumerate_all_cliques(G))`. However, be aware that in the
worst-case, the length of this list can be exponential in the number
of nodes in the graph (for example, when the graph is the complete
graph). This function avoids storing all cliques in memory by only
keeping current candidate node lists in memory during its search.
The implementation is adapted from the algorithm by Zhang, et
al. (2005) [1]_ to output all cliques discovered.
This algorithm ignores self-loops and parallel edges, since cliques
are not conventionally defined with such edges.
References
----------
.. [1] <NAME>, <NAME>., <NAME>., <NAME>.,
<NAME>., <NAME>.,
"Genome-Scale Computational Approaches to Memory-Intensive
Applications in Systems Biology".
*Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005
Conference, pp. 12, 12--18 Nov. 2005.
<https://doi.org/10.1109/SC.2005.29>.
"""
index = {}
nbrs = {}
for u in G:
index[u] = len(index)
# Neighbors of u that appear after u in the iteration order of G.
nbrs[u] = {v for v in G[u] if v not in index}
queue = deque(([u], sorted(nbrs[u], key=index.__getitem__)) for u in G)
# Loop invariants:
# 1. len(base) is nondecreasing.
# 2. (base + cnbrs) is sorted with respect to the iteration order of G.
# 3. cnbrs is a set of common neighbors of nodes in base.
while queue:
base, cnbrs = map(list, queue.popleft())
yield base
for i, u in enumerate(cnbrs):
# Use generators to reduce memory consumption.
queue.append(
(
chain(base, [u]),
filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)),
)
)
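# Example (illustrative): cliques are yielded smallest-first, per the
# docstring above.
#
# >>> list(enumerate_all_cliques(nx.complete_graph(3)))  # doctest: +SKIP
# [[0], [1], [2], [0, 1], [0, 2], [1, 2], [0, 1, 2]]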
@not_implemented_for("directed")
def find_cliques(G):
"""Returns all maximal cliques in an undirected graph.
For each node *v*, a *maximal clique for v* is a largest complete
subgraph containing *v*. The largest maximal clique is sometimes
called the *maximum clique*.
This function returns an iterator over cliques, each of which is a
list of nodes. It is an iterative implementation, so should not
suffer from recursion depth issues.
Parameters
----------
G : NetworkX graph
An undirected graph.
Returns
-------
iterator
An iterator over maximal cliques, each of which is a list of
nodes in `G`. The order of cliques is arbitrary.
See Also
--------
find_cliques_recursive
A recursive version of the same algorithm.
Notes
-----
To obtain a list of all maximal cliques, use
`list(find_cliques(G))`. However, be aware that in the worst-case,
the length of this list can be exponential in the number of nodes in
the graph. This function avoids storing all cliques in memory by
only keeping current candidate node lists in memory during its search.
This implementation is based on the algorithm published by Bron and
Kerbosch (1973) [1]_, as adapted by <NAME> and Takahashi
(2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. It
essentially unrolls the recursion used in the references to avoid
issues of recursion stack depth (for a recursive implementation, see
:func:`find_cliques_recursive`).
This algorithm ignores self-loops and parallel edges, since cliques
are not conventionally defined with such edges.
References
----------
.. [1] <NAME>. and <NAME>.
"Algorithm 457: finding all cliques of an undirected graph".
*Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
<http://portal.acm.org/citation.cfm?doid=362342.362367>
.. [2] <NAME>, <NAME>, <NAME>,
"The worst-case time complexity for generating all maximal
cliques and computational experiments",
*Theoretical Computer Science*, Volume 363, Issue 1,
Computing and Combinatorics,
10th Annual International Conference on
Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
<https://doi.org/10.1016/j.tcs.2006.06.015>
.. [3] <NAME>, <NAME>,
"A note on the problem of reporting maximal cliques",
*Theoretical Computer Science*,
Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
<https://doi.org/10.1016/j.tcs.2008.05.010>
"""
if len(G) == 0:
return
adj = {u: {v for v in G[u] if v != u} for u in G}
Q = [None]
subg = set(G)
cand = set(G)
u = max(subg, key=lambda u: len(cand & adj[u]))
ext_u = cand - adj[u]
stack = []
try:
while True:
if ext_u:
q = ext_u.pop()
cand.remove(q)
Q[-1] = q
adj_q = adj[q]
subg_q = subg & adj_q
if not subg_q:
yield Q[:]
else:
cand_q = cand & adj_q
if cand_q:
stack.append((subg, cand, ext_u))
Q.append(None)
subg = subg_q
cand = cand_q
u = max(subg, key=lambda u: len(cand & adj[u]))
ext_u = cand - adj[u]
else:
Q.pop()
subg, cand, ext_u = stack.pop()
except IndexError:
pass
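# Example (illustrative; node order within each clique may vary):
#
# >>> G = nx.complete_graph(4)
# >>> G.add_edge(3, 4)
# >>> sorted(sorted(c) for c in find_cliques(G))  # doctest: +SKIP
# [[0, 1, 2, 3], [3, 4]]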
# TODO Should this also be not implemented for directed graphs?
def find_cliques_recursive(G):
"""Returns all maximal cliques in a graph.
For each node *v*, a *maximal clique for v* is a largest complete
subgraph containing *v*. The largest maximal clique is sometimes
called the *maximum clique*.
This function returns an iterator over cliques, each of which is a
list of nodes. It is a recursive implementation, so may suffer from
recursion depth issues.
Parameters
----------
G : NetworkX graph
Returns
-------
iterator
An iterator over maximal cliques, each of which is a list of
nodes in `G`. The order of cliques is arbitrary.
See Also
--------
find_cliques
An iterative version of the same algorithm.
Notes
-----
To obtain a list of all maximal cliques, use
`list(find_cliques_recursive(G))`. However, be aware that in the
worst-case, the length of this list can be exponential in the number
of nodes in the graph. This function avoids storing all cliques in memory
by only keeping current candidate node lists in memory during its search.
This implementation is based on the algorithm published by Bron and
Kerbosch (1973) [1]_, as adapted by <NAME> and Takahashi
(2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. For a
non-recursive implementation, see :func:`find_cliques`.
This algorithm ignores self-loops and parallel edges, since cliques
are not conventionally defined with such edges.
References
----------
.. [1] <NAME>. and <NAME>.
"Algorithm 457: finding all cliques of an undirected graph".
*Communications of the ACM* 16, 9 (Sep. 1973), 575--577.
<http://portal.acm.org/citation.cfm?doid=362342.362367>
.. [2] <NAME>, <NAME>, <NAME>,
"The worst-case time complexity for generating all maximal
cliques and computational experiments",
*Theoretical Computer Science*, Volume 363, Issue 1,
Computing and Combinatorics,
10th Annual International Conference on
Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42
<https://doi.org/10.1016/j.tcs.2006.06.015>
.. [3] <NAME>, <NAME>,
"A note on the problem of reporting maximal cliques",
*Theoretical Computer Science*,
Volume 407, Issues 1--3, 6 November 2008, Pages 564--568,
<https://doi.org/10.1016/j.tcs.2008.05.010>
"""
if len(G) == 0:
return iter([])
adj = {u: {v for v in G[u] if v != u} for u in G}
Q = []
def expand(subg, cand):
u = max(subg, key=lambda u: len(cand & adj[u]))
for q in cand - adj[u]:
cand.remove(q)
Q.append(q)
adj_q = adj[q]
subg_q = subg & adj_q
if not subg_q:
yield Q[:]
else:
cand_q = cand & adj_q
if cand_q:
yield from expand(subg_q, cand_q)
Q.pop()
return expand(set(G), set(G))
def make_max_clique_graph(G, create_using=None):
"""Returns the maximal clique graph of the given graph.
The nodes of the maximal clique graph of `G` are the cliques of
`G` and an edge joins two cliques if the cliques are not disjoint.
Parameters
----------
G : NetworkX graph
create_using : NetworkX graph constructor, optional (default=nx.Graph)
Graph type to create. If graph instance, then cleared before populated.
Returns
-------
NetworkX graph
A graph whose nodes are the cliques of `G` and whose edges
join two cliques if they are not disjoint.
Notes
-----
This function behaves like the following code::
import networkx as nx
G = nx.make_clique_bipartite(G)
cliques = [v for v in G.nodes() if G.nodes[v]['bipartite'] == 0]
G = nx.bipartite.project(G, cliques)
G = nx.relabel_nodes(G, {-v: v - 1 for v in G})
It should be faster, though, since it skips all the intermediate
steps.
"""
if create_using is None:
B = G.__class__()
else:
B = nx.empty_graph(0, create_using)
cliques = list(enumerate(set(c) for c in find_cliques(G)))
# Add a numbered node for each clique.
B.add_nodes_from(i for i, c in cliques)
# Join cliques by an edge if they share a node.
clique_pairs = combinations(cliques, 2)
B.add_edges_from((i, j) for (i, c1), (j, c2) in clique_pairs if c1 & c2)
return B
def make_clique_bipartite(G, fpos=None, create_using=None, name=None):
"""Returns the bipartite clique graph corresponding to `G`.
In the returned bipartite graph, the "bottom" nodes are the nodes of
`G` and the "top" nodes represent the maximal cliques of `G`.
There is an edge from node *v* to clique *C* in the returned graph
if and only if *v* is an element of *C*.
Parameters
----------
G : NetworkX graph
An undirected graph.
fpos : bool
If True or not None, the returned graph will have an
additional attribute, `pos`, a dictionary mapping node to
position in the Euclidean plane.
create_using : NetworkX graph constructor, optional (default=nx.Graph)
Graph type to create. If graph instance, then cleared before populated.
Returns
-------
NetworkX graph
A bipartite graph whose "bottom" set is the nodes of the graph
`G`, whose "top" set is the cliques of `G`, and whose edges
join nodes of `G` to the cliques that contain them.
The nodes of the graph `G` have the node attribute
'bipartite' set to 1 and the nodes representing cliques
have the node attribute 'bipartite' set to 0, as is the
convention for bipartite graphs in NetworkX.
"""
B = nx.empty_graph(0, create_using)
B.clear()
# The "bottom" nodes in the bipartite graph are the nodes of the
# original graph, G.
B.add_nodes_from(G, bipartite=1)
for i, cl in enumerate(find_cliques(G)):
# The "top" nodes in the bipartite graph are the cliques. These
# nodes get negative numbers as labels.
name = -i - 1
B.add_node(name, bipartite=0)
B.add_edges_from((v, name) for v in cl)
return B
def graph_clique_number(G, cliques=None):
"""Returns the clique number of the graph.
The *clique number* of a graph is the size of the largest clique in
the graph.
Parameters
----------
G : NetworkX graph
An undirected graph.
cliques : list
A list of cliques, each of which is itself a list of nodes. If
not specified, the list of all cliques will be computed, as by
:func:`find_cliques`.
Returns
-------
int
The size of the largest clique in `G`.
Notes
-----
You should provide `cliques` if you have already computed the list
of maximal cliques, in order to avoid an exponential time search for
maximal cliques.
"""
if cliques is None:
cliques = find_cliques(G)
if len(G.nodes) < 1:
return 0
return max([len(c) for c in cliques] or [1])
def graph_number_of_cliques(G, cliques=None):
"""Returns the number of maximal cliques in the graph.
Parameters
----------
G : NetworkX graph
An undirected graph.
cliques : list
A list of cliques, each of which is itself a list of nodes. If
not specified, the list of all cliques will be computed, as by
:func:`find_cliques`.
Returns
-------
int
The number of maximal cliques in `G`.
Notes
-----
You should provide `cliques` if you have already computed the list
of maximal cliques, in order to avoid an exponential time search for
maximal cliques.
"""
if cliques is None:
cliques = list(find_cliques(G))
return len(cliques)
def node_clique_number(G, nodes=None, cliques=None):
""" Returns the size of the largest maximal clique containing
each given node.
Returns a single or list depending on input nodes.
Optional list of cliques can be input if already computed.
"""
if cliques is None:
if nodes is not None:
# Use ego_graph to decrease size of graph
if isinstance(nodes, list):
d = {}
for n in nodes:
H = nx.ego_graph(G, n)
d[n] = max(len(c) for c in find_cliques(H))
else:
H = nx.ego_graph(G, nodes)
d = max(len(c) for c in find_cliques(H))
return d
# nodes is None--find all cliques
cliques = list(find_cliques(G))
if nodes is None:
nodes = list(G.nodes()) # none, get entire graph
if not isinstance(nodes, list): # check for a list
v = nodes
# assume it is a single value
d = max([len(c) for c in cliques if v in c])
else:
d = {}
for v in nodes:
d[v] = max([len(c) for c in cliques if v in c])
return d
# if nodes is None: # none, use entire graph
# nodes=G.nodes()
# elif not isinstance(nodes, list): # check for a list
# nodes=[nodes] # assume it is a single value
# if cliques is None:
# cliques=list(find_cliques(G))
# d={}
# for v in nodes:
# d[v]=max([len(c) for c in cliques if v in c])
# if nodes in G:
# return d[v] #return single value
# return d
def number_of_cliques(G, nodes=None, cliques=None):
"""Returns the number of maximal cliques for each node.
    Returns a single count if `nodes` is a single node, or a dict keyed
    by node if `nodes` is a list (or None, meaning all nodes in `G`).
    An optional list of cliques can be supplied if already computed.
"""
if cliques is None:
cliques = list(find_cliques(G))
if nodes is None:
nodes = list(G.nodes()) # none, get entire graph
if not isinstance(nodes, list): # check for a list
v = nodes
# assume it is a single value
numcliq = len([1 for c in cliques if v in c])
else:
numcliq = {}
for v in nodes:
numcliq[v] = len([1 for c in cliques if v in c])
return numcliq
def cliques_containing_node(G, nodes=None, cliques=None):
"""Returns a list of cliques containing the given node.
    Returns a single list of cliques if `nodes` is a single node, or a
    dict keyed by node if `nodes` is a list (or None, meaning all nodes).
    An optional list of cliques can be supplied if already computed.
"""
if cliques is None:
cliques = list(find_cliques(G))
if nodes is None:
nodes = list(G.nodes()) # none, get entire graph
if not isinstance(nodes, list): # check for a list
v = nodes
# assume it is a single value
vcliques = [c for c in cliques if v in c]
else:
vcliques = {}
for v in nodes:
vcliques[v] = [c for c in cliques if v in c]
return vcliques
class MaxWeightClique(object):
"""A class for the maximum weight clique algorithm.
This class is a helper for the `max_weight_clique` function. The class
should not normally be used directly.
Parameters
----------
G : NetworkX graph
The undirected graph for which a maximum weight clique is sought
weight : string or None, optional (default='weight')
The node attribute that holds the integer value used as a weight.
If None, then each node has weight 1.
Attributes
----------
G : NetworkX graph
The undirected graph for which a maximum weight clique is sought
node_weights: dict
The weight of each node
incumbent_nodes : list
The nodes of the incumbent clique (the best clique found so far)
incumbent_weight: int
The weight of the incumbent clique
"""
def __init__(self, G, weight):
self.G = G
self.incumbent_nodes = []
self.incumbent_weight = 0
if weight is None:
self.node_weights = {v: 1 for v in G.nodes()}
else:
for v in G.nodes():
if weight not in G.nodes[v]:
err = "Node {} does not have the requested weight field."
raise KeyError(err.format(v))
if not isinstance(G.nodes[v][weight], int):
err = "The '{}' field of node {} is not an integer."
raise ValueError(err.format(weight, v))
self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()}
def update_incumbent_if_improved(self, C, C_weight):
"""Update the incumbent if the node set C has greater weight.
C is assumed to be a clique.
"""
if C_weight > self.incumbent_weight:
self.incumbent_nodes = C[:]
self.incumbent_weight = C_weight
def greedily_find_independent_set(self, P):
"""Greedily find an independent set of nodes from a set of
nodes P."""
independent_set = []
P = P[:]
while P:
v = P[0]
independent_set.append(v)
P = [w for w in P if v != w and not self.G.has_edge(v, w)]
return independent_set
def find_branching_nodes(self, P, target):
"""Find a set of nodes to branch on."""
residual_wt = {v: self.node_weights[v] for v in P}
total_wt = 0
P = P[:]
while P:
independent_set = self.greedily_find_independent_set(P)
min_wt_in_class = min(residual_wt[v] for v in independent_set)
total_wt += min_wt_in_class
if total_wt > target:
break
for v in independent_set:
residual_wt[v] -= min_wt_in_class
P = [v for v in P if residual_wt[v] != 0]
return P
def expand(self, C, C_weight, P):
"""Look for the best clique that contains all the nodes in C and zero or
more of the nodes in P, backtracking if it can be shown that no such
clique has greater weight than the incumbent.
"""
self.update_incumbent_if_improved(C, C_weight)
branching_nodes = self.find_branching_nodes(
P, self.incumbent_weight - C_weight)
while branching_nodes:
v = branching_nodes.pop()
P.remove(v)
new_C = C + [v]
new_C_weight = C_weight + self.node_weights[v]
new_P = [w for w in P if self.G.has_edge(v, w)]
self.expand(new_C, new_C_weight, new_P)
def find_max_weight_clique(self):
"""Find a maximum weight clique."""
# Sort nodes in reverse order of degree for speed
nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v),
reverse=True)
nodes = [v for v in nodes if self.node_weights[v] > 0]
self.expand([], 0, nodes)
@not_implemented_for('directed')
def max_weight_clique(G, weight='weight'):
"""Find a maximum weight clique in G.
A *clique* in a graph is a set of nodes such that every two distinct nodes
are adjacent. The *weight* of a clique is the sum of the weights of its
nodes. A *maximum weight clique* of graph G is a clique C in G such that
no clique in G has weight greater than the weight of C.
Parameters
----------
G : NetworkX graph
Undirected graph
weight : string or None, optional (default='weight')
The node attribute that holds the integer value used as a weight.
If None, then each node has weight 1.
Returns
-------
clique : list
the nodes of a maximum weight clique
weight : int
the weight of a maximum weight clique
Notes
-----
The implementation is recursive, and therefore it may run into recursion
depth issues if G contains a clique whose number of nodes is close to the
recursion depth limit.
At each search node, the algorithm greedily constructs a weighted
independent set cover of part of the graph in order to find a small set of
nodes on which to branch. The algorithm is very similar to the algorithm
of Tavares et al. [1]_, other than the fact that the NetworkX version does
not use bitsets. This style of algorithm for maximum weight clique (and
maximum weight independent set, which is the same problem but on the
complement graph) has a decades-long history. See Algorithm B of Warren
and Hicks [2]_ and the references in that paper.
References
----------
.. [1] <NAME>., <NAME>., <NAME>., <NAME>.: Um
algoritmo de branch and bound para o problema da clique máxima
ponderada. Proceedings of XLVII SBPO 1 (2015).
    .. [2] Warren, <NAME>, Hicks, <NAME>.: Combinatorial Branch-and-Bound
for the Maximum Weight Independent Set Problem. Technical Report,
Texas A&M University (2016).
"""
mwc = MaxWeightClique(G, weight)
mwc.find_max_weight_clique()
return mwc.incumbent_nodes, mwc.incumbent_weight
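# Example (illustrative; node order in the returned clique may vary):
#
# >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
# >>> nx.set_node_attributes(G, {0: 10, 1: 1, 2: 1}, name='weight')
# >>> max_weight_clique(G)  # doctest: +SKIP
# ([0, 1, 2], 12)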
```
#### File: algorithms/traversal/edgebfs.py
```python
from collections import deque
import networkx as nx
FORWARD = "forward"
REVERSE = "reverse"
__all__ = ["edge_bfs"]
def edge_bfs(G, source=None, orientation=None):
"""A directed, breadth-first-search of edges in `G`, beginning at `source`.
Yield the edges of G in a breadth-first-search order continuing until
all edges are generated.
Parameters
----------
G : graph
A directed/undirected graph/multigraph.
source : node, list of nodes
The node from which the traversal begins. If None, then a source
is chosen arbitrarily and repeatedly until all edges from each node in
the graph are searched.
orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
For directed graphs and directed multigraphs, edge traversals need not
respect the original orientation of the edges.
When set to 'reverse' every edge is traversed in the reverse direction.
When set to 'ignore', every edge is treated as undirected.
When set to 'original', every edge is treated as directed.
In all three cases, the yielded edge tuples add a last entry to
indicate the direction in which that edge was traversed.
If orientation is None, the yielded edge has no direction indicated.
The direction is respected, but not reported.
Yields
------
edge : directed edge
A directed edge indicating the path taken by the breadth-first-search.
For graphs, `edge` is of the form `(u, v)` where `u` and `v`
are the tail and head of the edge as determined by the traversal.
For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is
the key of the edge. When the graph is directed, then `u` and `v`
are always in the order of the actual directed edge.
If orientation is not None then the edge tuple is extended to include
the direction of traversal ('forward' or 'reverse') on that edge.
Examples
--------
>>> nodes = [0, 1, 2, 3]
>>> edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)]
>>> list(nx.edge_bfs(nx.Graph(edges), nodes))
[(0, 1), (0, 2), (1, 2), (1, 3)]
>>> list(nx.edge_bfs(nx.DiGraph(edges), nodes))
[(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
>>> list(nx.edge_bfs(nx.MultiGraph(edges), nodes))
[(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)]
>>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes))
[(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)]
>>> list(nx.edge_bfs(nx.DiGraph(edges), nodes, orientation='ignore'))
[(0, 1, 'forward'), (1, 0, 'reverse'), (2, 0, 'reverse'), (2, 1, 'reverse'), (3, 1, 'reverse')]
>>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes, orientation='ignore'))
[(0, 1, 0, 'forward'), (1, 0, 0, 'reverse'), (1, 0, 1, 'reverse'), (2, 0, 0, 'reverse'), (2, 1, 0, 'reverse'), (3, 1, 0, 'reverse')]
Notes
-----
The goal of this function is to visit edges. It differs from the more
familiar breadth-first-search of nodes, as provided by
:func:`networkx.algorithms.traversal.breadth_first_search.bfs_edges`, in
that it does not stop once every node has been visited. In a directed graph
with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited
if not for the functionality provided by this function.
The naming of this function is very similar to bfs_edges. The difference
is that 'edge_bfs' yields edges even if they extend back to an already
explored node while 'bfs_edges' yields the edges of the tree that results
from a breadth-first-search (BFS) so no edges are reported if they extend
to already explored nodes. That means 'edge_bfs' reports all edges while
'bfs_edges' only report those traversed by a node-based BFS. Yet another
description is that 'bfs_edges' reports the edges traversed during BFS
while 'edge_bfs' reports all edges in the order they are explored.
See Also
--------
bfs_edges
bfs_tree
edge_dfs
"""
nodes = list(G.nbunch_iter(source))
if not nodes:
return
directed = G.is_directed()
kwds = {"data": False}
if G.is_multigraph() is True:
kwds["keys"] = True
# set up edge lookup
if orientation is None:
def edges_from(node):
return iter(G.edges(node, **kwds))
elif not directed or orientation == "original":
def edges_from(node):
for e in G.edges(node, **kwds):
yield e + (FORWARD,)
elif orientation == "reverse":
def edges_from(node):
for e in G.in_edges(node, **kwds):
yield e + (REVERSE,)
elif orientation == "ignore":
def edges_from(node):
for e in G.edges(node, **kwds):
yield e + (FORWARD,)
for e in G.in_edges(node, **kwds):
yield e + (REVERSE,)
else:
raise nx.NetworkXError("invalid orientation argument.")
if directed:
neighbors = G.successors
def edge_id(edge):
# remove direction indicator
return edge[:-1] if orientation is not None else edge
else:
neighbors = G.neighbors
def edge_id(edge):
return (frozenset(edge[:2]),) + edge[2:]
check_reverse = directed and orientation in ("reverse", "ignore")
# start BFS
visited_nodes = {n for n in nodes}
visited_edges = set()
queue = deque([(n, edges_from(n)) for n in nodes])
while queue:
parent, children_edges = queue.popleft()
for edge in children_edges:
if check_reverse and edge[-1] == REVERSE:
child = edge[0]
else:
child = edge[1]
if child not in visited_nodes:
visited_nodes.add(child)
queue.append((child, edges_from(child)))
edgeid = edge_id(edge)
if edgeid not in visited_edges:
visited_edges.add(edgeid)
yield edge
``` |
{
"source": "JonasCir/1_011_a_infektionsfall_uebermittellung",
"score": 2
} |
#### File: resources/sample_data/genPersons.py
```python
import argparse
import json
import os
import pathlib
import string
from random import randint, randrange, choice, sample, getrandbits, seed
# introduce determinism
seed(42)
last_names = ['Peters', 'Müller', 'Schulz', 'Schulze', 'Weber', 'Wagner', 'Richter', 'Klein', 'Bauer', 'Schröder',
'Lange', 'Winkler', 'Winter', 'Sommer', 'Schmitt', 'Schmidt', 'Berger']
male_first_names = ['Peter', 'Daniel', 'Hans', 'Franz', 'Karl', 'Tim', 'Jan', 'Jens', 'Kai', 'Ben', 'Fin', 'Matthias',
'Christopher', 'Cornelius', 'Konrad']
female_fist_names = ['Jana', 'Lisa', 'Anna', 'Annika', 'Petra', 'Marie', 'Susanne', 'Daniela', 'Petra', 'Martina',
'Emma', 'Hanna', 'Olivia', 'Isabella']
genders = ['male', 'female']
# email_providers = ['t-online', 'posteo', 'gmail', 'gmx', 'web']
email_providers = ['example']
streets = [
"Aarhusweg",
"Im Kramersfeld"
"Aaröstraße",
"Haidthöhe"
"Aarweg",
"Schernerweg",
"Aastwiete",
"Abbachstraße",
"Niekampsweg",
"Abbendiekshof",
"Sonnenweg",
"Wintergasse",
"Südweg",
"Hauptstraße",
"Zähringerstraße",
"Kaiserstraße",
"Waldstraße",
"Steinstraße",
"Hafenstraße",
"Poststraße",
"Hohenzollerstraße",
"Eisenbahnstraße",
"Kronenstraße",
"Bismarckstraße",
"Rosenstraße",
"Tulpenweg",
"Bückerheide",
"Nordstraße",
"Nordtstraße",
"Nordufer"]
# temporarily replace possible cities for simulation
# cities = ['Berlin', 'München', 'Hamburg', 'Köln', 'Düsseldorf', 'Kiel', 'Freiburg', 'Bochum', 'Frankfurt', 'Saarbrücken']
cities = ['Saarbrücken', 'Sulzbach', 'Dudweiler', 'St. Ingbert', 'Saarlouis', 'Völklingen', 'Bous', 'Neunkirchen',
'Homburg', 'Kirkel', 'Heusweiler', 'Riegelsberg', 'Püttlingen', 'St. Wendel', 'Merzig']
insurance_companies = ['AOK', 'Barmer', 'Techniker Krankenkasse', 'IKK Nord', 'KNAPPSCHAFT', 'DAK Gesundheit']
symptoms = [
'LOSS_OF_APPETITE',
'DIFFICULTY_BREATHING',
'SHORTNESS_OF_BREATH',
'FEVER',
'WEIGHT_LOSS',
'COUGH',
'HEADACHE',
'MUSCLE_PAIN',
'BACK_PAIN',
'COLD',
'NAUSEA',
'LOSS_OF_SENSE_OF_SMELL_TASTE'
]
days_in_month = {
"01": 31, "02": 28,
"03": 31, "04": 30,
"05": 31, "06": 30,
"07": 31, "08": 31,
"09": 30, "10": 31,
"11": 30, "12": 31
}
def gen_date_of_birth():
year = 2020 - randint(15, 95)
m = "{:02}".format(randrange(12) + 1)
day = randrange(days_in_month[m]) + 1
d = "{:02}".format(day)
return f'{year}-{m}-{d}'
def rand_num_str(length=10):
    # One random digit (0-9) per position; `length` avoids shadowing built-in len().
    return ''.join([str(randint(0, 9)) for _ in range(length)])
exposures = [
'MEDICAL_HEALTH_PROFESSION',
'MEDICAL_LABORATORY',
'STAY_IN_MEDICAL_FACILITY',
'CONTACT_WITH_CORONA_CASE',
'CONTACT_WITH_CORONA_CASE_MEDICAL_FACILITY',
'CONTACT_WITH_CORONA_CASE_PRIVATE',
'CONTACT_WITH_CORONA_CASE_WORK',
'CONTACT_WITH_CORONA_CASE_OTHER',
'COMMUNITY_FACILITY',
'COMMUNITY_FACILITY_MINORS',
]
preIllnesses = [
'ARDS',
'RESPIRATORY_DISEASE',
'CHRONIC_LUNG_DISEASE',
'DIABETES',
'ADIPOSITAS',
'CARDIOVASCULAR_DISEASE',
'IMMUNODEFICIENCY',
'CANCER',
'LIVER_DISEASE',
'NEUROLOGICAL_DISEASE',
'KIDNEY_DISEASE',
'SMOKING',
'PREGNANCY',
'Alzheimer', # Custom pre illness
]
def insurance_number():
upper_alphabet = string.ascii_uppercase
random_letter = choice(upper_alphabet)
number = randint(100000000, 999999999)
return f'{random_letter}{number}'
def replace_umlauts(inp):
return inp.replace('ä', 'ae').replace('ö', 'oe').replace('ü', 'ue')
########################################################################################################################
def gen_person():
gender = choice(genders)
    first_name = choice(male_first_names) if gender == 'male' else choice(female_first_names)
last_name = choice(last_names)
_email = f'{first_name[0].lower()}.{last_name.lower()}@{choice(email_providers)}.de'
email = replace_umlauts(_email)
return {
'lastName': last_name,
'firstName': first_name,
'gender': gender,
'dateOfBirth': gen_date_of_birth(),
'email': email,
'phoneNumber': rand_num_str(),
# include house number within street field
'street': '{} {}'.format(choice(streets), randint(0, 100)),
# 'houseNumber': randint(0, 100),
# temporarily filter zip codes to saarland region (approximately)
# 'zip': rand_num_str(5),
'zip': '66{}'.format(rand_num_str(3)),
'city': choice(cities),
'country': 'DE',
'insuranceCompany': choice(insurance_companies),
'insuranceMembershipNumber': insurance_number(),
'fluImmunization': bool(getrandbits(1)),
'speedOfSymptomsOutbreak': choice(['Langsam', 'Mittel', 'Schnell']),
'symptoms': sample(symptoms, randint(0, len(symptoms))),
'coronaContacts': bool(getrandbits(1)),
'riskAreas': [choice(exposures)] if randint(0, 4) > 1 else [],
'weakenedImmuneSystem': bool(getrandbits(1)),
'preIllnesses': [choice(preIllnesses)] if randint(0, 4) > 1 else [],
'nationality': 'deutsch'
}
def main():
argparser = argparse.ArgumentParser(description='Generate test patients for IMIS.')
argparser.add_argument('-u', '--update', action='store_true',
help='Only overwrite files if this script changed since last generation')
argparser.add_argument('amount', nargs='?', type=int, default=250,
help='Amount of patients to generate')
args = argparser.parse_args()
pathlib.Path('persons').mkdir(parents=True, exist_ok=True)
# Check time of last update of this script
script_last_modified = os.stat(__file__).st_mtime
skipped_count = 0
for i in range(args.amount):
filename = f'persons/person{i}.json'
# Check whether the file needs to be created / overwritten
write_file = not args.update
try:
write_file = write_file or os.stat(filename).st_mtime < script_last_modified
        except FileNotFoundError:
write_file = True
# Always generate patient so that determinism is preserved
patient = gen_person()
if write_file:
print(f'Writing patient data to file `{filename}`')
with open(filename, 'w+') as f:
f.write(json.dumps(patient, sort_keys=True, indent=2))
else:
skipped_count += 1
print()
print('=================================================')
print()
if skipped_count == 0:
print(f'{args.amount} patient files written, none skipped')
elif skipped_count == args.amount:
        print('All patient files already up-to-date')
else:
print(f'{args.amount - skipped_count} patient files written, {skipped_count} skipped')
if __name__ == '__main__':
main()
``` |
{
"source": "jonasclaes/project-thomasmore-vanroey-2021",
"score": 3
} |
#### File: python/web/app.py
```python
from flask import Flask, render_template
from flask_socketio import SocketIO, send, emit
app = Flask(__name__)
app.config["SECRET_KEY"] = "secret!"
socketio = SocketIO(app)
@app.route("/page14")
def hello_world():
return render_template("index.html")
@app.route("/page6")
def page1():
return render_template("Milestone1.html")
@app.route("/page5")
def page2():
return render_template("Milestone2.html")
@app.route("/page4")
def page3():
return render_template("Milestone3.html")
@app.route("/page3")
def page4():
return render_template("Milestone4.html")
@app.route("/page2")
def page5():
return render_template("Milestone5.html")
@app.route("/page1")
def page6():
return render_template("Milestone6.html")
@app.route("/page12")
def page7():
return render_template("Milestone7.html")
@app.route("/page11")
def page8():
return render_template("Milestone8.html")
@app.route("/page10")
def page9():
return render_template("Milestone9.html")
@app.route("/page9")
def page10():
return render_template("Milestone10.html")
@app.route("/page8")
def page11():
return render_template("Photo.html")
@app.route("/page7")
def page12():
return render_template("comingsoon.html")
@socketio.on('message')
def handle_message(data):
print('received message: ' + str(data))
send(data)
@socketio.on('json')
def handle_json(json):
print('received json: ' + str(json))
send(json, json=True, broadcast=True)
@socketio.on('change window')
def handle_my_custom_event(json):
print('received json: ' + str(json))
send(json, broadcast=True)
if __name__ == '__main__':
socketio.run(app)
``` |
{
"source": "jonascub/example-telegram-bot",
"score": 3
} |
#### File: jonascub/example-telegram-bot/bot3.py
```python
import os
import qrcode
from telegram import ChatAction, InlineKeyboardMarkup, InlineKeyboardButton
from telegram.ext import Updater, CommandHandler, ConversationHandler, CallbackQueryHandler, MessageHandler, Filters
INPUT_TEXT = 0
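# Conversation state: the bot is waiting for the user to send the text to encode as a QR code.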
def start(update, context):
update.message.reply_text(
text='Hola, bienvenido, qué deseas hacer?\n\nUsa /qr para generar un código QR.',
reply_markup=InlineKeyboardMarkup([
[InlineKeyboardButton(text='Generar QR.', callback_data='qr')],
[InlineKeyboardButton(text='Sobre el Autor', url='https://www.twitter.com/jonascub')]
])
)
def qr_command_handler(update, context):
update.message.reply_text('Enviame el texto para generarte un código QR.')
return INPUT_TEXT
def qr_callback_handler(update, context):
query = update.callback_query
query.answer()
query.edit_message_text(
text='Enviame el texto para generarte un código QR.'
)
return INPUT_TEXT
def input_text(update, context):
chat = update.message.chat
text = update.message.text
    filename = generate_qr(text)
    send_qr(filename, chat)
return ConversationHandler.END
def generate_qr(text):
filename = text + '.jpg'
img = qrcode.make(text)
img.save(filename)
return filename
def send_qr(filename, chat):
chat.send_action(
action=ChatAction.UPLOAD_PHOTO,
timeout=None
)
    # Use a context manager so the file handle is closed before the file is deleted
    with open(filename, 'rb') as photo_file:
        chat.send_photo(photo=photo_file)
    os.unlink(filename)
if __name__ == '__main__':
updater = Updater(token='<KEY>', use_context=True)
dp = updater.dispatcher
dp.add_handler(CommandHandler('start', start))
dp.add_handler(ConversationHandler(
entry_points=[
CommandHandler('qr', qr_command_handler),
CallbackQueryHandler(pattern='qr', callback=qr_callback_handler)
],
states={
INPUT_TEXT: [MessageHandler(Filters.text, input_text)]
},
fallbacks=[]
))
updater.start_polling()
updater.idle()
```
#### File: jonascub/example-telegram-bot/bot.py
```python
import os
import qrcode
from telegram import ChatAction
from telegram.ext import Updater, CommandHandler, ConversationHandler, MessageHandler, Filters
INPUT_TEXT = 0
def start(update, context):
update.message.reply_text('Hola, bienvenido, qué deseas hacer?\n\nUsa /qr para generar un código QR.')
def qr_command_handler(update, context):
update.message.reply_text('Enviame un texto para generarte un código QR.')
return INPUT_TEXT
def input_text(update, context):
chat = update.message.chat
text = update.message.text
    filename = generate_qr(text)
    send_qr(filename, chat)
return ConversationHandler.END
def generate_qr(text):
filename = text + '.jpg'
img = qrcode.make(text)
img.save(filename)
return filename
def send_qr(filename, chat):
chat.send_action(
action=ChatAction.UPLOAD_PHOTO,
timeout=None
)
    # Use a context manager so the file handle is closed before the file is deleted
    with open(filename, 'rb') as photo_file:
        chat.send_photo(photo=photo_file)
    os.unlink(filename)
if __name__ == '__main__':
updater = Updater(token='TOKEN', use_context=True)
dp = updater.dispatcher
dp.add_handler(CommandHandler('start', start))
dp.add_handler(ConversationHandler(
entry_points=[
CommandHandler('qr', qr_command_handler)
],
states={
INPUT_TEXT: [MessageHandler(Filters.text, input_text)]
},
fallbacks=[]
))
updater.start_polling()
updater.idle()
``` |
{
"source": "Jonasdart/SESP-API",
"score": 2
} |
#### File: commons/errors/computersRouteErrors.py
```python
__author__ = '<NAME>'
from datetime import datetime
from resources.models.commons.database_manager import Database
from resources.models.commons.mysql_manager import Gera_query
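# Logs API errors to both a flat log file and the api_logs database table, then re-raises for the caller.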
class ErrorController():
def __init__(self, error, route, method, applicant):
self.error = str(error)
self.route = route
self.method = method
self.applicant = applicant
self.log_register()
def log_register(self):
try:
with open('resources/models/commons/errors/log.log', 'a') as log:
log.write(f'Route: {self.route}\t\tMethod:{self.method}\t\tApplicant:{self.applicant}\t\tError:{self.error}\t\tWhen:{str(datetime.now())}\n')
query = Gera_query().inserir_na_tabela('api_logs', ['type_id', 'route', 'method', 'applicant','body'], ['1', f'"{self.route}"', f'"{self.method}"', f'"{self.applicant}"', self.error])
Database().commit_without_return(query)
        except Exception as e:
            with open('resources/models/commons/errors/log.log', 'a') as log:
                log.write(f'Route: LogRegister\t\t\t\t\tMethod:\t\t\tApplicant:{self.applicant}\t\tError:{e}\t\tWhen:{str(datetime.now())}\n')
            raise e
        # self.error is a string; raising it directly would be a TypeError in Python 3,
        # so wrap it in an Exception before re-raising to the caller.
        raise Exception(self.error)
```
#### File: models/commons/models.py
```python
__author__ = '<NAME>'
from resources.models.commons.glpi import Glpi
from resources.models.commons.database_manager import Database
from datetime import datetime
class Backend():
def __init__(self):
self.database = Database()
self.glpi = Glpi()
def return_date_time(self):
try:
date = datetime.now().strftime('%d-%m-%Y')
time = datetime.now().strftime('%H:%M:%S')
self.r = {
'Message' : date + ' ' + time,
'Date' : date,
'Time' : time,
'Status' : 200
}
except Exception as e:
self.r = {
'Message' : {
'Error' : 'Return_date_time => ' + str(e)
},
'Status' : 404
}
return self.r
```
#### File: resources/models/incidentModel.py
```python
__author__ = '<NAME>'
class IncidentModel():
def _search_incidents_by_name(self, name):
raise NotImplementedError
def _search_incidents_by_description(self, description):
raise NotImplementedError
def _search_incident_by_id(self, incident_id):
raise NotImplementedError
```
#### File: SESP-API/resources/scriptsRoute.py
```python
__author__ = '<NAME>'
from flask import request
from flask_restful import Resource
class Scripts(Resource):
def get(self):
'''
Retornar script
'''
raise NotImplementedError
def post(self):
'''
Criar script
'''
raise NotImplementedError
def put(self):
'''
Editar script
'''
raise NotImplementedError
def patch(self):
'''
Ativar/ desativar script
'''
raise NotImplementedError
``` |