max_stars_repo_path (stringlengths 4-286) | max_stars_repo_name (stringlengths 5-119) | max_stars_count (int64 0-191k) | id (stringlengths 1-7) | content (stringlengths 6-1.03M) | content_cleaned (stringlengths 6-1.03M) | language (stringclasses 111 values) | language_score (float64 0.03-1) | comments (stringlengths 0-556k) | edu_score (float64 0.32-5.03) | edu_int_score (int64 0-5)
---|---|---|---|---|---|---|---|---|---|---
option_batch_trxn.py | shawlu95/td-ameritrade-python-api | 0 | 6631751 | # run at 1:01 pm every day
# 1 13 * * * /Users/shawlu/Documents/anaconda3/envs/analytics/bin/python3 /Users/shawlu/Documents/td-ameritrade-python-api/option_daily_trxn.py
import pandas as pd
from td.client import TDClient
from td.enums import ORDER_SESSION, ORDER_TYPE
from util.options import processOptionTrade, orderedKeys
from config import ACCOUNT_ID, ACCOUNT_NUMBER, ACCOUNT_PASSWORD, CONSUMER_ID, REDIRECT_URI, SECRET_QUESTIONS
# create a new session
sess = TDClient(
account_number = ACCOUNT_NUMBER,
account_password = <PASSWORD>,
consumer_id = CONSUMER_ID,
redirect_uri = REDIRECT_URI,
secret_questions = SECRET_QUESTIONS
)
sess.login()
data = sess.get_transactions(account = ACCOUNT_ID, transaction_type = 'TRADE')
trades = list(filter(lambda entry: entry['type'] == 'TRADE', data))
optionTrades = filter(lambda entry: entry['transactionItem']['instrument']['assetType'] == 'OPTION', trades)
df = pd.DataFrame(columns=orderedKeys, data=[processOptionTrade(entry) for entry in optionTrades])
df = df.sort_values(["orderId", "leg"])
# insert today's option trades into database
from config import DB_USERNAME, DB_PASSWORD, DB_PORT, DB_HOST
from util.DB import DB
db = DB(username=DB_USERNAME, password=<PASSWORD>, host=DB_HOST, port=DB_PORT, database="trading")
df.to_sql('options', db.engine, if_exists='append', index=False)
| # run at 1:01 pm every day
# 1 13 * * * /Users/shawlu/Documents/anaconda3/envs/analytics/bin/python3 /Users/shawlu/Documents/td-ameritrade-python-api/option_daily_trxn.py
import pandas as pd
from td.client import TDClient
from td.enums import ORDER_SESSION, ORDER_TYPE
from util.options import processOptionTrade, orderedKeys
from config import ACCOUNT_ID, ACCOUNT_NUMBER, ACCOUNT_PASSWORD, CONSUMER_ID, REDIRECT_URI, SECRET_QUESTIONS
# create a new session
sess = TDClient(
account_number = ACCOUNT_NUMBER,
account_password = <PASSWORD>,
consumer_id = CONSUMER_ID,
redirect_uri = REDIRECT_URI,
secret_questions = SECRET_QUESTIONS
)
sess.login()
data = sess.get_transactions(account = ACCOUNT_ID, transaction_type = 'TRADE')
trades = list(filter(lambda entry: entry['type'] == 'TRADE', data))
optionTrades = filter(lambda entry: entry['transactionItem']['instrument']['assetType'] == 'OPTION', trades)
df = pd.DataFrame(columns=orderedKeys, data=[processOptionTrade(entry) for entry in optionTrades])
df = df.sort_values(["orderId", "leg"])
# insert today's option trades into database
from config import DB_USERNAME, DB_PASSWORD, DB_PORT, DB_HOST
from util.DB import DB
db = DB(username=DB_USERNAME, password=<PASSWORD>, host=DB_HOST, port=DB_PORT, database="trading")
df.to_sql('options', db.engine, if_exists='append', index=False)
| en | 0.420944 | # run at 1:01 pm every day # 1 13 * * * /Users/shawlu/Documents/anaconda3/envs/analytics/bin/python3 /Users/shawlu/Documents/td-ameritrade-python-api/option_daily_trxn.py # create a new session # insert today's option trades into database | 2.240919 | 2 |
test/test_arnoldi.py | JohnReid/nolina | 0 | 6631752 | <gh_stars>0
from codetiming import Timer
import logging
import pytest
import numpy as np
from nolina import random, arnoldi
_logger = logging.getLogger(__name__)
js = [9, 10, 11]
@pytest.mark.parametrize("j", js)
def test_arnoldi(j, rng, seed):
d = 11
A = random.random_spsd_matrix(d=d, random_state=rng)
b = random.random_vector(d=d, random_state=rng)
with Timer(text='Arnoldi done in {:.4f} seconds', logger=_logger.info):
v = arnoldi.arnoldi(A, b, j=j)
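# the Arnoldi basis vectors should be orthonormal, so v @ v.T should be (close to) the identity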
np.testing.assert_almost_equal(np.eye(v.shape[0]), v @ v.T)
| from codetiming import Timer
import logging
import pytest
import numpy as np
from nolina import random, arnoldi
_logger = logging.getLogger(__name__)
js = [9, 10, 11]
@pytest.mark.parametrize("j", js)
def test_arnoldi(j, rng, seed):
d = 11
A = random.random_spsd_matrix(d=d, random_state=rng)
b = random.random_vector(d=d, random_state=rng)
with Timer(text='Arnoldi done in {:.4f} seconds', logger=_logger.info):
v = arnoldi.arnoldi(A, b, j=j)
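# the Arnoldi basis vectors should be orthonormal, so v @ v.T should be (close to) the identity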
np.testing.assert_almost_equal(np.eye(v.shape[0]), v @ v.T) | none | 1 | 2.136248 | 2 |
|
roles/slurm/files/citc_gcp.py | danhnguyen48/slurm-elastic-computing | 0 | 6631753 | <filename>roles/slurm/files/citc_gcp.py
import re
import subprocess
import time
from typing import Dict, Optional, Tuple
from google.oauth2 import service_account # type: ignore
import googleapiclient.discovery # type: ignore
import logging
import yaml
import os
from pathlib import Path
import asyncio
__all__ = ["get_nodespace", "start_node"]
def load_yaml(filename: str) -> dict:
with open(filename, "r") as f:
return yaml.safe_load(f)
def get_nodespace(file="/etc/citc/startnode.yaml") -> Dict[str, str]:
"""
Get the information about the space into which we were creating nodes
This will be static for all nodes in this cluster
"""
return load_yaml(file)
def get_node(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> Dict:
filter_clause = f'(name={hostname})'
result = gce_compute.instances().list(project=compartment_id, zone=zone, filter=filter_clause).execute()
item = result['items'][0] if 'items' in result else None
log.debug(f'get items {item}')
return item
def get_node_state(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> Optional[str]:
"""
Get the current node state of the VM for the given hostname
If there is no such VM, return "TERMINATED"
"""
item = get_node(gce_compute, log, compartment_id, zone, hostname)
if item:
return item['status']
return None
def get_ip_for_vm(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> str:
item = get_node(gce_compute, log, compartment_id, zone, hostname)
network = item['networkInterfaces'][0]
log.debug(f'network {network}')
ip = network['networkIP']
return ip
def get_shape(hostname):
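# ask Slurm for the node's feature list and pull the machine shape out of the
# "shape=<machine-type>" feature, e.g. "shape=n1-standard-2" -> "n1-standard-2"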
features = subprocess.run(["sinfo", "--Format=features:200", "--noheader", f"--nodes={hostname}"], stdout=subprocess.PIPE).stdout.decode().split(',')
shape = [f for f in features if f.startswith("shape=")][0].split("=")[1].strip()
return shape
def create_node_config(gce_compute, hostname: str, ip: Optional[str], nodespace: Dict[str, str], ssh_keys: str):
"""
Create the configuration needed to create ``hostname`` in ``nodespace`` with ``ssh_keys``
"""
shape = get_shape(hostname)
subnet = nodespace["subnet"]
zone = nodespace["zone"]
with open("/home/slurm/bootstrap.sh", "rb") as f:
user_data = f.read().decode()
machine_type = f"zones/{zone}/machineTypes/{shape}"
image_response = gce_compute.images().getFromFamily(project='gce-uefi-images', family='centos-7').execute()
source_disk_image = image_response['selfLink']
config = {
'name': hostname,
'machineType': machine_type,
'disks': [
{
'boot': True,
'autoDelete': True,
'initializeParams': {
'sourceImage': source_disk_image,
}
}
],
'networkInterfaces': [
{
'subnetwork': subnet,
'addressType': 'INTERNAL', # Can't find this in the docs...
'address': ip, # should be networkIP?
'accessConfigs': [
{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}
]
}
],
'minCpuPlatform': 'Intel Skylake',
'metadata': {
'items': [{
'key': 'startup-script',
'value': user_data
}]
},
'tags': {
"items": [
"compute",
],
},
}
return config
def get_ip(hostname: str) -> Tuple[Optional[str], Optional[str], Optional[str]]:
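# resolve the node's IP two ways: via DNS (the `host` command) and via the NodeAddr
# that Slurm has registered (`scontrol show node`); the DNS result wins when both exist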
host_dns_match = re.match(r"(\d+\.){3}\d+", subprocess.run(["host", hostname], stdout=subprocess.PIPE).stdout.decode().split()[-1])
dns_ip = host_dns_match.group(0) if host_dns_match else None
slurm_dns_match = re.search(r"NodeAddr=((\d+\.){3}\d+)", subprocess.run(["scontrol", "show", "node", hostname], stdout=subprocess.PIPE).stdout.decode())
slurm_ip = slurm_dns_match.group(1) if slurm_dns_match else None
ip = dns_ip or slurm_ip
return ip, dns_ip, slurm_ip
def get_credentials():
service_account_file = Path(os.environ.get('SA_LOCATION', '/home/slurm/mgmt-sa-credentials.json'))
if service_account_file.exists():
return service_account.Credentials.from_service_account_file(service_account_file)
return None
def get_build():
credentials = get_credentials()
compute = googleapiclient.discovery.build('compute', 'v1', credentials=credentials, cache_discovery=False)
return compute
async def start_node(log, host: str, nodespace: Dict[str, str], ssh_keys: str) -> None:
project = nodespace["compartment_id"]
zone = nodespace["zone"]
log.info(f"Starting {host} in {project} {zone}")
gce_compute = get_build()
while get_node_state(gce_compute, log, project, zone, host) in ["STOPPING", "TERMINATED"]:
log.info(" host is currently being deleted. Waiting...")
await asyncio.sleep(5)
node_state = get_node_state(gce_compute, log, project, zone, host)
if node_state is not None:
log.warning(f" host is already running with state {node_state}")
return
ip, _dns_ip, slurm_ip = get_ip(host)
instance_details = create_node_config(gce_compute, host, ip, nodespace, ssh_keys)
loop = asyncio.get_event_loop()
try:
inserter = gce_compute.instances().insert(project=project, zone=zone, body=instance_details)
instance = await loop.run_in_executor(None, inserter.execute)
except Exception as e:
log.error(f" problem launching instance: {e}")
return
if not slurm_ip:
while not get_node(gce_compute, log, project, zone, host)['networkInterfaces'][0].get("networkIP"):
log.info(f"{host}: No VNIC attachment yet. Waiting...")
await asyncio.sleep(5)
vm_ip = get_ip_for_vm(gce_compute, log, project, zone, host)
log.info(f" Private IP {vm_ip}")
subprocess.run(["scontrol", "update", f"NodeName={host}", f"NodeAddr={vm_ip}"])
log.info(f" Started {host}")
return instance
def terminate_instance(log, hosts, nodespace=None):
gce_compute = get_build()
if not nodespace:
nodespace = get_nodespace()
project = nodespace["compartment_id"]
zone = nodespace["zone"]
for host in hosts:
log.info(f"Stopping {host}")
try:
response = gce_compute.instances() \
.delete(project=project,
zone=zone,
instance=host) \
.execute()
except Exception as e:
log.error(f" problem while stopping: {e}")
continue
log.info(f" Stopped {host}")
# [START run]
async def do_create_instance():
os.environ['SA_LOCATION'] = '/home/davidy/secrets/ex-eccoe-university-bristol-52b726c8a1f3.json'
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
log = logging.getLogger("startnode")
hosts = ['dy-test-node1']
log.info('Creating instance.')
await asyncio.gather(*(
start_node(log, host, get_nodespace('test_nodespace.yaml'), "")
for host in hosts
))
log.info(f'Instances in project done')
log.info(f'Terminating')
terminate_instance(log, hosts, nodespace=get_nodespace('test_nodespace.yaml'))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(do_create_instance())
finally:
loop.close()
| <filename>roles/slurm/files/citc_gcp.py
import re
import subprocess
import time
from typing import Dict, Optional, Tuple
from google.oauth2 import service_account # type: ignore
import googleapiclient.discovery # type: ignore
import logging
import yaml
import os
from pathlib import Path
import asyncio
__all__ = ["get_nodespace", "start_node"]
def load_yaml(filename: str) -> dict:
with open(filename, "r") as f:
return yaml.safe_load(f)
def get_nodespace(file="/etc/citc/startnode.yaml") -> Dict[str, str]:
"""
Get the information about the space into which we were creating nodes
This will be static for all nodes in this cluster
"""
return load_yaml(file)
def get_node(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> Dict:
filter_clause = f'(name={hostname})'
result = gce_compute.instances().list(project=compartment_id, zone=zone, filter=filter_clause).execute()
item = result['items'][0] if 'items' in result else None
log.debug(f'get items {item}')
return item
def get_node_state(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> Optional[str]:
"""
Get the current node state of the VM for the given hostname
If there is no such VM, return "TERMINATED"
"""
item = get_node(gce_compute, log, compartment_id, zone, hostname)
if item:
return item['status']
return None
def get_ip_for_vm(gce_compute, log, compartment_id: str, zone: str, hostname: str) -> str:
item = get_node(gce_compute, log, compartment_id, zone, hostname)
network = item['networkInterfaces'][0]
log.debug(f'network {network}')
ip = network['networkIP']
return ip
def get_shape(hostname):
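# ask Slurm for the node's feature list and pull the machine shape out of the
# "shape=<machine-type>" feature, e.g. "shape=n1-standard-2" -> "n1-standard-2"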
features = subprocess.run(["sinfo", "--Format=features:200", "--noheader", f"--nodes={hostname}"], stdout=subprocess.PIPE).stdout.decode().split(',')
shape = [f for f in features if f.startswith("shape=")][0].split("=")[1].strip()
return shape
def create_node_config(gce_compute, hostname: str, ip: Optional[str], nodespace: Dict[str, str], ssh_keys: str):
"""
Create the configuration needed to create ``hostname`` in ``nodespace`` with ``ssh_keys``
"""
shape = get_shape(hostname)
subnet = nodespace["subnet"]
zone = nodespace["zone"]
with open("/home/slurm/bootstrap.sh", "rb") as f:
user_data = f.read().decode()
machine_type = f"zones/{zone}/machineTypes/{shape}"
image_response = gce_compute.images().getFromFamily(project='gce-uefi-images', family='centos-7').execute()
source_disk_image = image_response['selfLink']
config = {
'name': hostname,
'machineType': machine_type,
'disks': [
{
'boot': True,
'autoDelete': True,
'initializeParams': {
'sourceImage': source_disk_image,
}
}
],
'networkInterfaces': [
{
'subnetwork': subnet,
'addressType': 'INTERNAL', # Can't find this in the docs...
'address': ip, # should be networkIP?
'accessConfigs': [
{'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT'}
]
}
],
'minCpuPlatform': 'Intel Skylake',
'metadata': {
'items': [{
'key': 'startup-script',
'value': user_data
}]
},
'tags': {
"items": [
"compute",
],
},
}
return config
def get_ip(hostname: str) -> Tuple[Optional[str], Optional[str], Optional[str]]:
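# resolve the node's IP two ways: via DNS (the `host` command) and via the NodeAddr
# that Slurm has registered (`scontrol show node`); the DNS result wins when both exist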
host_dns_match = re.match(r"(\d+\.){3}\d+", subprocess.run(["host", hostname], stdout=subprocess.PIPE).stdout.decode().split()[-1])
dns_ip = host_dns_match.group(0) if host_dns_match else None
slurm_dns_match = re.search(r"NodeAddr=((\d+\.){3}\d+)", subprocess.run(["scontrol", "show", "node", hostname], stdout=subprocess.PIPE).stdout.decode())
slurm_ip = slurm_dns_match.group(1) if slurm_dns_match else None
ip = dns_ip or slurm_ip
return ip, dns_ip, slurm_ip
def get_credentials():
service_account_file = Path(os.environ.get('SA_LOCATION', '/home/slurm/mgmt-sa-credentials.json'))
if service_account_file.exists():
return service_account.Credentials.from_service_account_file(service_account_file)
return None
def get_build():
credentials = get_credentials()
compute = googleapiclient.discovery.build('compute', 'v1', credentials=credentials, cache_discovery=False)
return compute
async def start_node(log, host: str, nodespace: Dict[str, str], ssh_keys: str) -> None:
project = nodespace["compartment_id"]
zone = nodespace["zone"]
log.info(f"Starting {host} in {project} {zone}")
gce_compute = get_build()
while get_node_state(gce_compute, log, project, zone, host) in ["STOPPING", "TERMINATED"]:
log.info(" host is currently being deleted. Waiting...")
await asyncio.sleep(5)
node_state = get_node_state(gce_compute, log, project, zone, host)
if node_state is not None:
log.warning(f" host is already running with state {node_state}")
return
ip, _dns_ip, slurm_ip = get_ip(host)
instance_details = create_node_config(gce_compute, host, ip, nodespace, ssh_keys)
loop = asyncio.get_event_loop()
try:
inserter = gce_compute.instances().insert(project=project, zone=zone, body=instance_details)
instance = await loop.run_in_executor(None, inserter.execute)
except Exception as e:
log.error(f" problem launching instance: {e}")
return
if not slurm_ip:
while not get_node(gce_compute, log, project, zone, host)['networkInterfaces'][0].get("networkIP"):
log.info(f"{host}: No VNIC attachment yet. Waiting...")
await asyncio.sleep(5)
vm_ip = get_ip_for_vm(gce_compute, log, project, zone, host)
log.info(f" Private IP {vm_ip}")
subprocess.run(["scontrol", "update", f"NodeName={host}", f"NodeAddr={vm_ip}"])
log.info(f" Started {host}")
return instance
def terminate_instance(log, hosts, nodespace=None):
gce_compute = get_build()
if not nodespace:
nodespace = get_nodespace()
project = nodespace["compartment_id"]
zone = nodespace["zone"]
for host in hosts:
log.info(f"Stopping {host}")
try:
response = gce_compute.instances() \
.delete(project=project,
zone=zone,
instance=host) \
.execute()
except Exception as e:
log.error(f" problem while stopping: {e}")
continue
log.info(f" Stopped {host}")
# [START run]
async def do_create_instance():
os.environ['SA_LOCATION'] = '/home/davidy/secrets/ex-eccoe-university-bristol-52b726c8a1f3.json'
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
log = logging.getLogger("startnode")
hosts = ['dy-test-node1']
log.info('Creating instance.')
await asyncio.gather(*(
start_node(log, host, get_nodespace('test_nodespace.yaml'), "")
for host in hosts
))
log.info(f'Instances in project done')
log.info(f'Terminating')
terminate_instance(log, hosts, nodespace=get_nodespace('test_nodespace.yaml'))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(do_create_instance())
finally:
loop.close()
| en | 0.825925 | # type: ignore # type: ignore Get the information about the space into which we were creating nodes This will be static for all nodes in this cluster Get the current node state of the VM for the given hostname If there is no such VM, return "TERMINATED" Create the configuration needed to create ``hostname`` in ``nodespace`` with ``ssh_keys`` # Can't find this in the docs... # should be networkIP? # [START run] | 2.228892 | 2 |
app/back/mongo/data/collect/ports/mongo.py | jgphilpott/polyplot | 5 | 6631754 | from back.mongo.data.collect.ions import find_collection
from back.mongo.data.collect.ports.model import Port
collection = find_collection("ports")
def find_port(query={}, filter={"_id": 0}):
return dict(collection.find_one(query, filter))
def find_ports(query={}, filter={"_id": 0}, sort=[("properties.flow", -1), ("properties.code", 1)], limit=0):
collection.create_index(sort)
return list(collection.find(query, filter).sort(sort).limit(limit))
| from back.mongo.data.collect.ions import find_collection
from back.mongo.data.collect.ports.model import Port
collection = find_collection("ports")
def find_port(query={}, filter={"_id": 0}):
return dict(collection.find_one(query, filter))
def find_ports(query={}, filter={"_id": 0}, sort=[("properties.flow", -1), ("properties.code", 1)], limit=0):
collection.create_index(sort)
return list(collection.find(query, filter).sort(sort).limit(limit))
| none | 1 | 2.501165 | 3 |
|
libs/python-axolotl/axolotl/state/signedprekeyrecord.py | akshitpradhan/TomHack | 4 | 6631755 | # -*- coding; utf-8 -*-
from .storageprotos_pb2 import SignedPreKeyRecordStructure
from ..ecc.curve import Curve
from ..ecc.eckeypair import ECKeyPair
class SignedPreKeyRecord:
def __init__(self, _id=None, timestamp=None, ecKeyPair=None, signature=None, serialized=None):
self.structure = SignedPreKeyRecordStructure()
if serialized:
self.structure.ParseFromString(serialized)
else:
self.structure.id = _id
self.structure.publicKey = ecKeyPair.getPublicKey().serialize()
self.structure.privateKey = ecKeyPair.getPrivateKey().serialize()
self.structure.signature = signature
self.structure.timestamp = timestamp
def getId(self):
return self.structure.id
def getTimestamp(self):
return self.structure.timestamp
def getKeyPair(self):
publicKey = Curve.decodePoint(bytearray(self.structure.publicKey), 0)
privateKey = Curve.decodePrivatePoint(bytearray(self.structure.privateKey))
return ECKeyPair(publicKey, privateKey)
def getSignature(self):
return self.structure.signature
def serialize(self):
return self.structure.SerializeToString()
# -*- coding: utf-8 -*-
from .storageprotos_pb2 import SignedPreKeyRecordStructure
from ..ecc.curve import Curve
from ..ecc.eckeypair import ECKeyPair
class SignedPreKeyRecord:
def __init__(self, _id=None, timestamp=None, ecKeyPair=None, signature=None, serialized=None):
self.structure = SignedPreKeyRecordStructure()
if serialized:
self.structure.ParseFromString(serialized)
else:
self.structure.id = _id
self.structure.publicKey = ecKeyPair.getPublicKey().serialize()
self.structure.privateKey = ecKeyPair.getPrivateKey().serialize()
self.structure.signature = signature
self.structure.timestamp = timestamp
def getId(self):
return self.structure.id
def getTimestamp(self):
return self.structure.timestamp
def getKeyPair(self):
publicKey = Curve.decodePoint(bytearray(self.structure.publicKey), 0)
privateKey = Curve.decodePrivatePoint(bytearray(self.structure.privateKey))
return ECKeyPair(publicKey, privateKey)
def getSignature(self):
return self.structure.signature
def serialize(self):
return self.structure.SerializeToString()
| en | 0.712927 | # -*- coding: utf-8 -*- | 2.428625 | 2 |
mix-list/mixlist01.py | MarkDuenas/mycode | 0 | 6631756 | #!/usr/bin/env python3
my_list = ["192.168.0.5", 5060, "UP"]
print("The first item in the list (IP): " + my_list[0])
print("The second item in the list (port): " + str(my_list[1]))
print("The last item in the list (state): " + my_list[2])
# display only the IP addresses to the screen.
iplist = [5060, "80", 55, "10.0.0.1", "10.20.30.1", "ssh"]
# example 1 - add up the strings
print("IP addresses: " + iplist[3] + ", and " + iplist[4])
# example 2 - use the comma separator
print("IP addresses:", iplist[3], ", and", iplist[4])
# example 3 - use an 'f-string'
print(f"IP addresses: {iplist[3]}, and {iplist[4]}")
| #!/usr/bin/env python3
my_list = ["192.168.0.5", 5060, "UP"]
print("The first item in the list (IP): " + my_list[0])
print("The second item in the list (port): " + str(my_list[1]))
print("The last item in the list (state): " + my_list[2])
# display only the IP addresses to the screen.
iplist = [5060, "80", 55, "10.0.0.1", "10.20.30.1", "ssh"]
# example 1 - add up the strings
print("IP addresses: " + iplist[3] + ", and " + iplist[4])
# example 2 - use the comma separator
print("IP addresses:", iplist[3], ", and", iplist[4])
# example 3 - use an 'f-string'
print(f"IP addresses: {iplist[3]}, and {iplist[4]}")
| en | 0.380202 | #!/usr/bin/env python3 # display only the IP addresses to the screen. # example 1 - add up the strings # example 2 - use the comma separator # example 3 - use an 'f-string' | 4.120486 | 4 |
scripts/venv/lib/python2.7/site-packages/ete2/tools/phylobuild_lib/task/fasttree.py | sauloal/cnidaria | 3 | 6631757 | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# <NAME>, <NAME> and <NAME>.
# ETE: a python Environment for Tree Exploration. BMC
# Bioinformatics 2010, 11:24. doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: <EMAIL>
#
#
# #END_LICENSE#############################################################
import os
import sys
import re
import shutil
import logging
log = logging.getLogger("main")
from ete2.tools.phylobuild_lib.master_task import TreeTask
from ete2.tools.phylobuild_lib.master_job import Job
from ete2.tools.phylobuild_lib.utils import (basename, Tree, OrderedDict, GLOBALS,
FASTTREE_CITE, DATATYPES, pjoin)
from ete2.tools.phylobuild_lib import db
__all__ = ["FastTree"]
class FastTree(TreeTask):
def __init__(self, nodeid, alg_file, constrain_id, model, seqtype,
conf, confname, parts_id=None):
GLOBALS["citator"].add(FASTTREE_CITE)
self.confname = confname
self.conf = conf
self.alg_phylip_file = alg_file
self.constrain_tree = None
if constrain_id:
self.constrain_tree = db.get_dataid(constrain_id, DATATYPES.constrain_alg)
self.alg_basename = basename(self.alg_phylip_file)
self.seqtype = seqtype
self.tree_file = ""
if model:
log.warning("FastTree does not support model selection")
self.model = None
self.lk = None
base_args = OrderedDict()
base_args["-nopr"] = ""
if self.seqtype == "nt":
base_args["-gtr -nt"] = ""
elif self.seqtype == "aa":
pass
else:
raise ValueError("Unknown seqtype %s" %self.seqtype)
TreeTask.__init__(self, nodeid, "tree", "FastTree", base_args,
self.conf[confname])
self.init()
def load_jobs(self):
args = self.args.copy()
try:
del args["-wag"]
except KeyError:
pass
if self.constrain_tree:
args["-constraints"] = pjoin(GLOBALS["input_dir"], self.constrain_tree)
args[pjoin(GLOBALS["input_dir"], self.alg_phylip_file)] = ""
appname = self.conf[self.confname]["_app"]
job = Job(self.conf["app"][appname], args, parent_ids=[self.nodeid])
job.cores = self.conf["threading"][appname]
if self.constrain_tree:
job.add_input_file(self.constrain_tree)
job.add_input_file(self.alg_phylip_file)
self.jobs.append(job)
def finish(self):
job = self.jobs[-1]
t = Tree(job.stdout_file)
TreeTask.store_data(self, t.write(), {})
| # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# <NAME>, <NAME> and <NAME>.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: <EMAIL>
#
#
# #END_LICENSE#############################################################
import os
import sys
import re
import shutil
import logging
log = logging.getLogger("main")
from ete2.tools.phylobuild_lib.master_task import TreeTask
from ete2.tools.phylobuild_lib.master_job import Job
from ete2.tools.phylobuild_lib.utils import (basename, Tree, OrderedDict, GLOBALS,
FASTTREE_CITE, DATATYPES, pjoin)
from ete2.tools.phylobuild_lib import db
__all__ = ["FastTree"]
class FastTree(TreeTask):
def __init__(self, nodeid, alg_file, constrain_id, model, seqtype,
conf, confname, parts_id=None):
GLOBALS["citator"].add(FASTTREE_CITE)
self.confname = confname
self.conf = conf
self.alg_phylip_file = alg_file
self.constrain_tree = None
if constrain_id:
self.constrain_tree = db.get_dataid(constrain_id, DATATYPES.constrain_alg)
self.alg_basename = basename(self.alg_phylip_file)
self.seqtype = seqtype
self.tree_file = ""
if model:
log.warning("FastTree does not support model selection")
self.model = None
self.lk = None
base_args = OrderedDict()
base_args["-nopr"] = ""
if self.seqtype == "nt":
base_args["-gtr -nt"] = ""
elif self.seqtype == "aa":
pass
else:
raise ValueError("Unknown seqtype %s" %self.seqtype)
TreeTask.__init__(self, nodeid, "tree", "FastTree", base_args,
self.conf[confname])
self.init()
def load_jobs(self):
args = self.args.copy()
try:
del args["-wag"]
except KeyError:
pass
if self.constrain_tree:
args["-constraints"] = pjoin(GLOBALS["input_dir"], self.constrain_tree)
args[pjoin(GLOBALS["input_dir"], self.alg_phylip_file)] = ""
appname = self.conf[self.confname]["_app"]
job = Job(self.conf["app"][appname], args, parent_ids=[self.nodeid])
job.cores = self.conf["threading"][appname]
if self.constrain_tree:
job.add_input_file(self.constrain_tree)
job.add_input_file(self.alg_phylip_file)
self.jobs.append(job)
def finish(self):
job = self.jobs[-1]
t = Tree(job.stdout_file)
TreeTask.store_data(self, t.write(), {})
| en | 0.686451 | # #START_LICENSE########################################################### # # # This file is part of the Environment for Tree Exploration program # (ETE). http://etetoolkit.org # # ETE is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ETE is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public # License for more details. # # You should have received a copy of the GNU General Public License # along with ETE. If not, see <http://www.gnu.org/licenses/>. # # # ABOUT THE ETE PACKAGE # ===================== # # ETE is distributed under the GPL copyleft license (2008-2015). # # If you make use of ETE in published work, please cite: # # <NAME>, <NAME> and <NAME>. # ETE: a python Environment for Tree Exploration. Jaime BMC # Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24 # # Note that extra references to the specific methods implemented in # the toolkit may be available in the documentation. # # More info at http://etetoolkit.org. Contact: <EMAIL> # # # #END_LICENSE############################################################# | 1.709039 | 2 |
binding.gyp | HexF/node-waitpid | 3 | 6631758 | <gh_stars>1-10
{
"targets": [
{
"target_name": "waitpid",
"sources": [ "src/waitpid.cc" ]
}
]
}
| {
"targets": [
{
"target_name": "waitpid",
"sources": [ "src/waitpid.cc" ]
}
]
} | none | 1 | 1.05333 | 1 |
|
setup.py | rebeccaserramari/Haplotype_blockparsing | 0 | 6631759 | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
from Cython.Distutils import build_ext
ext_modules = [Extension("blockparser", ["blockparser.pyx", "DP_matrix.cpp"], language='c++',extra_compile_args=["-std=c++11", "-O2"], extra_link_args=["-std=c++11"])]
setup(cmdclass = {'build_ext': build_ext}, ext_modules = ext_modules) | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
from Cython.Distutils import build_ext
ext_modules = [Extension("blockparser", ["blockparser.pyx", "DP_matrix.cpp"], language='c++',extra_compile_args=["-std=c++11", "-O2"], extra_link_args=["-std=c++11"])]
setup(cmdclass = {'build_ext': build_ext}, ext_modules = ext_modules) | none | 1 | 1.568971 | 2 |
|
check_homework.py | zuzannnaobajtek/github-cmake-project-checker | 1 | 6631760 | <gh_stars>1-10
from argparse import ArgumentParser
from project_checker.checker.pull_all_links import make_default_config
from project_checker.checker.pull_all_links import check_homework_by_configuration
def make_program_arg_parser():
parser = ArgumentParser(prog='check homework')
parser.add_argument('-r', '--only-repositories', dest='repos', type=str, default='',
help='regex matchers separated with colon (:) of repositories to include')
parser.add_argument('-t', '--only-homeworks', dest='homeworks', type=str, default='',
help='regex matchers separated with colon (:) of homeworks to include')
parser.add_argument('-u', '--update-local-repositories', dest='pull',
help='pull updates from remote repositories',
action='store_true')
return parser
def main_args(args):
config = make_default_config()
repos = args.repos.split(':')
if len(args.repos) > 0 and len(repos) > 0:
print('only projects: ' + str(repos))
config.repository_owners.exclude_other_projects_than(repos)
else:
print('processing all projects')
homework = args.homeworks.split(':')
if len(args.homeworks) > 0 and len(homework) > 0:
print('only homeworks: ' + str(homework))
config.homework.exclude_other_homework_than(homework)
else:
print('processing all homeworks')
if args.pull:
print('update')
else:
print('old version of pull')
check_homework_by_configuration(config, pull_new_version=args.pull)
def main():
parser = make_program_arg_parser()
args = parser.parse_args()
main_args(args)
if __name__ == "__main__":
main()
| from argparse import ArgumentParser
from project_checker.checker.pull_all_links import make_default_config
from project_checker.checker.pull_all_links import check_homework_by_configuration
def make_program_arg_parser():
parser = ArgumentParser(prog='check homework')
parser.add_argument('-r', '--only-repositories', dest='repos', type=str, default='',
help='regex matchers separated with colon (:) of repositories to include')
parser.add_argument('-t', '--only-homeworks', dest='homeworks', type=str, default='',
help='regex matchers separated with colon (:) of homeworks to include')
parser.add_argument('-u', '--update-local-repositories', dest='pull',
help='pull updates from remote repositories',
action='store_true')
return parser
def main_args(args):
config = make_default_config()
repos = args.repos.split(':')
if len(args.repos) > 0 and len(repos) > 0:
print('only projects: ' + str(repos))
config.repository_owners.exclude_other_projects_than(repos)
else:
print('processing all projects')
homework = args.homeworks.split(':')
if len(args.homeworks) > 0 and len(homework) > 0:
print('only homeworks: ' + str(homework))
config.homework.exclude_other_homework_than(homework)
else:
print('processing all homeworks')
if args.pull:
print('update')
else:
print('old version of pull')
check_homework_by_configuration(config, pull_new_version=args.pull)
def main():
parser = make_program_arg_parser()
args = parser.parse_args()
main_args(args)
if __name__ == "__main__":
main() | none | 1 | 2.734619 | 3 |
|
paz/pipelines/pose.py | niqbal996/paz | 300 | 6631761 | from ..abstract import Processor, SequentialProcessor
from .. import processors as pr
from .detection import HaarCascadeFrontalFace
from .keypoints import FaceKeypointNet2D32
import numpy as np
FACE_KEYPOINTNET3D = np.array([
[-220, 678, 1138], # left--center-eye
[+220, 678, 1138], # right-center-eye
[-131, 676, 1107], # left--eye close to nose
[-294, 610, 1123], # left--eye close to ear
[+131, 676, 1107], # right-eye close to nose
[+294, 610, 1123], # right-eye close to ear
[-106, 758, 1224], # left--eyebrow close to nose
[-375, 585, 1208], # left--eyebrow close to ear
[+106, 758, 1224], # right-eyebrow close to nose
[+375, 585, 1208], # right-eyebrow close to ear
[0.0, 909, 919], # nose
[-183, 691, 683], # lefty-lip
[+183, 691, 683], # right-lip
[0.0, 826, 754], # up---lip
[0.0, 815, 645], # down-lip
])
FACE_KEYPOINTNET3D = FACE_KEYPOINTNET3D - np.mean(FACE_KEYPOINTNET3D, axis=0)
class EstimatePoseKeypoints(Processor):
def __init__(self, detect, estimate_keypoints, camera, offsets,
model_points, class_to_dimensions, radius=3, thickness=1):
"""Pose estimation pipeline using keypoints.
# Arguments
detect: Function that outputs a dictionary with a key
``boxes2D`` having a list of ``Box2D`` messages.
estimate_keypoints: Function that outputs a dictionary
with a key ``keypoints`` with numpy array as value
camera: Instance of ``paz.backend.camera.Camera`` with
camera intrinsics.
offsets: List of floats indicating the scaled offset to
be added to the ``Box2D`` coordinates.
model_points: Numpy array of shape ``(num_keypoints, 3)``
indicating the 3D coordinates of the predicted keypoints
from the ``estimate_keypoints`` function.
class_to_dimensions: Dictionary with keys being the class labels
of the predicted ``Box2D`` messages and the values a list of
two integers indicating the height and width of the object.
e.g. {'PowerDrill': [30, 20]}.
radius: Int. radius of keypoint to be drawn.
thickness: Int. thickness of 3D box.
# Returns
A function that takes an RGB image and outputs the following
inferences as keys of a dictionary:
``image``, ``boxes2D``, ``keypoints`` and ``poses6D``.
"""
super(EstimatePoseKeypoints, self).__init__()
self.num_keypoints = estimate_keypoints.num_keypoints
self.detect = detect
self.estimate_keypoints = estimate_keypoints
self.square = SequentialProcessor()
self.square.add(pr.SquareBoxes2D())
self.square.add(pr.OffsetBoxes2D(offsets))
self.clip = pr.ClipBoxes2D()
self.crop = pr.CropBoxes2D()
self.change_coordinates = pr.ChangeKeypointsCoordinateSystem()
self.solve_PNP = pr.SolvePNP(model_points, camera)
self.draw_keypoints = pr.DrawKeypoints2D(self.num_keypoints, radius)
self.draw_box = pr.DrawBoxes3D(camera, class_to_dimensions, thickness)
self.wrap = pr.WrapOutput(['image', 'boxes2D', 'keypoints', 'poses6D'])
def call(self, image):
boxes2D = self.detect(image)['boxes2D']
boxes2D = self.square(boxes2D)
boxes2D = self.clip(image, boxes2D)
cropped_images = self.crop(image, boxes2D)
poses6D, keypoints2D = [], []
for cropped_image, box2D in zip(cropped_images, boxes2D):
keypoints = self.estimate_keypoints(cropped_image)['keypoints']
keypoints = self.change_coordinates(keypoints, box2D)
pose6D = self.solve_PNP(keypoints)
image = self.draw_keypoints(image, keypoints)
image = self.draw_box(image, pose6D)
keypoints2D.append(keypoints)
poses6D.append(pose6D)
return self.wrap(image, boxes2D, keypoints2D, poses6D)
class HeadPoseKeypointNet2D32(EstimatePoseKeypoints):
"""Head pose estimation pipeline using a ``HaarCascade`` face detector
and a pre-trained ``KeypointNet2D`` estimation model.
# Arguments
camera: Instance of ``paz.backend.camera.Camera`` with
camera intrinsics.
offsets: List of floats indicating the scaled offset to
be added to the ``Box2D`` coordinates.
radius: Int. radius of keypoint to be drawn.
# Example
``` python
from paz.pipelines import HeadPoseKeypointNet2D32
estimate_pose = HeadPoseKeypointNet2D32()
# apply directly to an image (numpy-array)
inferences = estimate_pose(image)
```
# Returns
A function that takes an RGB image and outputs the following
inferences as keys of a dictionary:
``image``, ``boxes2D``, ``keypoints`` and ``poses6D``.
"""
def __init__(self, camera, offsets=[0, 0], radius=3, thickness=1):
detect = HaarCascadeFrontalFace(draw=False)
estimate_keypoints = FaceKeypointNet2D32(draw=False)
super(HeadPoseKeypointNet2D32, self).__init__(
detect, estimate_keypoints, camera, offsets,
FACE_KEYPOINTNET3D, {None: [900.0, 600.0]}, radius, thickness)
| from ..abstract import Processor, SequentialProcessor
from .. import processors as pr
from .detection import HaarCascadeFrontalFace
from .keypoints import FaceKeypointNet2D32
import numpy as np
FACE_KEYPOINTNET3D = np.array([
[-220, 678, 1138], # left--center-eye
[+220, 678, 1138], # right-center-eye
[-131, 676, 1107], # left--eye close to nose
[-294, 610, 1123], # left--eye close to ear
[+131, 676, 1107], # right-eye close to nose
[+294, 610, 1123], # right-eye close to ear
[-106, 758, 1224], # left--eyebrow close to nose
[-375, 585, 1208], # left--eyebrow close to ear
[+106, 758, 1224], # right-eyebrow close to nose
[+375, 585, 1208], # right-eyebrow close to ear
[0.0, 909, 919], # nose
[-183, 691, 683], # lefty-lip
[+183, 691, 683], # right-lip
[0.0, 826, 754], # up---lip
[0.0, 815, 645], # down-lip
])
FACE_KEYPOINTNET3D = FACE_KEYPOINTNET3D - np.mean(FACE_KEYPOINTNET3D, axis=0)
class EstimatePoseKeypoints(Processor):
def __init__(self, detect, estimate_keypoints, camera, offsets,
model_points, class_to_dimensions, radius=3, thickness=1):
"""Pose estimation pipeline using keypoints.
# Arguments
detect: Function that outputs a dictionary with a key
``boxes2D`` having a list of ``Box2D`` messages.
estimate_keypoints: Function that outputs a dictionary
with a key ``keypoints`` with numpy array as value
camera: Instance of ``paz.backend.camera.Camera`` with
camera intrinsics.
offsets: List of floats indicating the scaled offset to
be added to the ``Box2D`` coordinates.
model_points: Numpy array of shape ``(num_keypoints, 3)``
indicating the 3D coordinates of the predicted keypoints
from the ``estimate_keypoints`` function.
class_to_dimensions: Dictionary with keys being the class labels
of the predicted ``Box2D`` messages and the values a list of
two integers indicating the height and width of the object.
e.g. {'PowerDrill': [30, 20]}.
radius: Int. radius of keypoint to be drawn.
thickness: Int. thickness of 3D box.
# Returns
A function that takes an RGB image and outputs the following
inferences as keys of a dictionary:
``image``, ``boxes2D``, ``keypoints`` and ``poses6D``.
"""
super(EstimatePoseKeypoints, self).__init__()
self.num_keypoints = estimate_keypoints.num_keypoints
self.detect = detect
self.estimate_keypoints = estimate_keypoints
self.square = SequentialProcessor()
self.square.add(pr.SquareBoxes2D())
self.square.add(pr.OffsetBoxes2D(offsets))
self.clip = pr.ClipBoxes2D()
self.crop = pr.CropBoxes2D()
self.change_coordinates = pr.ChangeKeypointsCoordinateSystem()
self.solve_PNP = pr.SolvePNP(model_points, camera)
self.draw_keypoints = pr.DrawKeypoints2D(self.num_keypoints, radius)
self.draw_box = pr.DrawBoxes3D(camera, class_to_dimensions, thickness)
self.wrap = pr.WrapOutput(['image', 'boxes2D', 'keypoints', 'poses6D'])
def call(self, image):
boxes2D = self.detect(image)['boxes2D']
boxes2D = self.square(boxes2D)
boxes2D = self.clip(image, boxes2D)
cropped_images = self.crop(image, boxes2D)
poses6D, keypoints2D = [], []
for cropped_image, box2D in zip(cropped_images, boxes2D):
keypoints = self.estimate_keypoints(cropped_image)['keypoints']
keypoints = self.change_coordinates(keypoints, box2D)
pose6D = self.solve_PNP(keypoints)
image = self.draw_keypoints(image, keypoints)
image = self.draw_box(image, pose6D)
keypoints2D.append(keypoints)
poses6D.append(pose6D)
return self.wrap(image, boxes2D, keypoints2D, poses6D)
class HeadPoseKeypointNet2D32(EstimatePoseKeypoints):
"""Head pose estimation pipeline using a ``HaarCascade`` face detector
and a pre-trained ``KeypointNet2D`` estimation model.
# Arguments
camera: Instance of ``paz.backend.camera.Camera`` with
camera intrinsics.
offsets: List of floats indicating the scaled offset to
be added to the ``Box2D`` coordinates.
radius: Int. radius of keypoint to be drawn.
# Example
``` python
from paz.pipelines import HeadPoseKeypointNet2D32
estimate_pose = HeadPoseKeypointNet2D32()
# apply directly to an image (numpy-array)
inferences = estimate_pose(image)
```
# Returns
A function that takes an RGB image and outputs the following
inferences as keys of a dictionary:
``image``, ``boxes2D``, ``keypoints`` and ``poses6D``.
"""
def __init__(self, camera, offsets=[0, 0], radius=3, thickness=1):
detect = HaarCascadeFrontalFace(draw=False)
estimate_keypoints = FaceKeypointNet2D32(draw=False)
super(HeadPoseKeypointNet2D32, self).__init__(
detect, estimate_keypoints, camera, offsets,
FACE_KEYPOINTNET3D, {None: [900.0, 600.0]}, radius, thickness)
| en | 0.735551 | # left--center-eye # right-center-eye # left--eye close to nose # left--eye close to ear # right-eye close to nose # right-eye close to ear # left--eyebrow close to nose # left--eyebrow close to ear # right-eyebrow close to nose # right-eyebrow close to ear # nose # lefty-lip # right-lip # up---lip # down-lip Pose estimation pipeline using keypoints. # Arguments detect: Function that outputs a dictionary with a key ``boxes2D`` having a list of ``Box2D`` messages. estimate_keypoints: Function that outputs a dictionary with a key ``keypoints`` with numpy array as value camera: Instance of ``paz.backend.camera.Camera`` with camera intrinsics. offsets: List of floats indicating the scaled offset to be added to the ``Box2D`` coordinates. model_points: Numpy array of shape ``(num_keypoints, 3)`` indicating the 3D coordinates of the predicted keypoints from the ``esimate_keypoints`` function. class_to_dimensions: Dictionary with keys being the class labels of the predicted ``Box2D`` messages and the values a list of two integers indicating the height and width of the object. e.g. {'PowerDrill': [30, 20]}. radius: Int. radius of keypoint to be drawn. thickness: Int. thickness of 3D box. # Returns A function that takes an RGB image and outputs the following inferences as keys of a dictionary: ``image``, ``boxes2D``, ``keypoints`` and ``poses6D``. Head pose estimation pipeline using a ``HaarCascade`` face detector and a pre-trained ``KeypointNet2D`` estimation model. # Arguments camera: Instance of ``paz.backend.camera.Camera`` with camera intrinsics. offsets: List of floats indicating the scaled offset to be added to the ``Box2D`` coordinates. radius: Int. radius of keypoint to be drawn. # Example ``` python from paz.pipelines import HeadPoseKeypointNet2D32 estimate_pose = HeadPoseKeypointNet2D32() # apply directly to an image (numpy-array) inferences = estimate_pose(image) ``` # Returns A function that takes an RGB image and outputs the following inferences as keys of a dictionary: ``image``, ``boxes2D``, ``keypoints`` and ``poses6D``. | 2.528151 | 3 |
model/Reff_constants.py | tdennisliu/covid19-forecasting-aus | 7 | 6631762 | public_holidays ={
'ACT': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Canberra Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-01": "Reconciliation Day",
"2020-06-08": "Queen's Birthday",
"2020-10-05": "Labour Day",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day"
},
'NSW': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-01": "Reconciliation Day",
"2020-06-08": "Queen's Birthday",
"2020-08-03": "Bank Holiday",
"2020-10-05": "Labour Day",
"2020-12-25": "Christmas Day",
"2020-12-26": "Christmas Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'NT': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Saturday before Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-05-04": "May Day",
"2020-06-08": "Queen's Birthday",
"2020-08-03": "Picnic Day",
"2020-10-05": "Labour Day",
"2020-12-24": "Christmas Eve",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day",
"2020-12-31": "New Year's Eve"
},
'QLD': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-05-04": "Labour Day",
"2020-10-05": "Queen's Birthday",
"2020-12-25": "Christmas Day",
"2020-12-26": "Boxing Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'SA': {
"2020-01-01": "New Year's Day",
"2020-01-26": "Australia Day",
"2020-01-27": "Australia Day (Additional day)",
"2020-03-09": "Adelaide Cup Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-10-05": "Labour Day",
"2020-12-24": "Christmas Eve",
"2020-12-25": "Christmas Day",
"2020-12-28": "Proclamation Day",
"2020-12-31": "New Year's Eve"
},
'TAS': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Eight Hours Day",
"2020-04-10": "Good Friday",
"2020-04-13": "Easter Monday",
"2020-04-14": "Easter Tuesday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day"
},
'VIC': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Labour Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Saturday before Easter Sunday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-09-25": "Friday before AFL Grand Final",
"2020-11-03": "Melbourne Cup",
"2020-12-25": "Christmas Day",
"2020-12-26": "Boxing Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'WA': {
"2020-01-01", "New Year's Day",
"2020-01-27", "Australia Day",
"2020-03-02", "Labour Day",
"2020-04-10", "Good Friday",
"2020-04-13", "Easter Monday",
"2020-04-25", "ANZAC Day",
"2020-04-27", "ANZAC Day",
"2020-06-01", "Western Australia Day",
"2020-09-28", "Queen's Birthday",
"2020-12-25", "Christmas Day",
"2020-12-26", "Boxing Day",
"2020-12-28", "Boxing Day (Additional day)"
},
}
value_vars=['retail_and_recreation_percent_change_from_baseline',
'grocery_and_pharmacy_percent_change_from_baseline',
'parks_percent_change_from_baseline',
'transit_stations_percent_change_from_baseline',
'workplaces_percent_change_from_baseline',
'residential_percent_change_from_baseline']
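# names of the 7-day moving-average columns: drop the 29-character
# '_percent_change_from_baseline' suffix and append '_7days'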
mov_values = [ val[:-29]+'_7days' for val in value_vars]
states_initials ={
'Western Australia':'WA',
'South Australia':'SA',
'Tasmania':'TAS',
'Queensland':'QLD',
'New South Wales':'NSW',
'Victoria':'VIC',
'Australian Capital Territory':'ACT',
'Northern Territory':'NT'
}
| public_holidays ={
'ACT': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Canberra Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-01": "Reconciliation Day",
"2020-06-08": "Queen's Birthday",
"2020-10-05": "Labour Day",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day"
},
'NSW': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-01": "Reconciliation Day",
"2020-06-08": "Queen's Birthday",
"2020-08-03": "Bank Holiday",
"2020-10-05": "Labour Day",
"2020-12-25": "Christmas Day",
"2020-12-26": "Christmas Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'NT': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Saturday before Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-05-04": "May Day",
"2020-06-08": "Queen's Birthday",
"2020-08-03": "Picnic Day",
"2020-10-05": "Labour Day",
"2020-12-24": "Christmas Eve",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day",
"2020-12-31": "New Year's Eve"
},
'QLD': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-05-04": "Labour Day",
"2020-10-05": "Queen's Birthday",
"2020-12-25": "Christmas Day",
"2020-12-26": "Boxing Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'SA': {
"2020-01-01": "New Year's Day",
"2020-01-26": "Australia Day",
"2020-01-27": "Australia Day (Additional day)",
"2020-03-09": "Adelaide Cup Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Easter Saturday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-10-05": "Labour Day",
"2020-12-24": "Christmas Eve",
"2020-12-25": "Christmas Day",
"2020-12-28": "Proclamation Day",
"2020-12-31": "New Year's Eve"
},
'TAS': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Eight Hours Day",
"2020-04-10": "Good Friday",
"2020-04-13": "Easter Monday",
"2020-04-14": "Easter Tuesday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-12-25": "Christmas Day",
"2020-12-28": "Boxing Day"
},
'VIC': {
"2020-01-01": "New Year's Day",
"2020-01-27": "Australia Day",
"2020-03-09": "Labour Day",
"2020-04-10": "Good Friday",
"2020-04-11": "Saturday before Easter Sunday",
"2020-04-12": "Easter Sunday",
"2020-04-13": "Easter Monday",
"2020-04-25": "ANZAC Day",
"2020-06-08": "Queen's Birthday",
"2020-09-25": "Friday before AFL Grand Final",
"2020-11-03": "Melbourne Cup",
"2020-12-25": "Christmas Day",
"2020-12-26": "Boxing Day",
"2020-12-28": "Boxing Day (Additional day)"
},
'WA': {
"2020-01-01", "New Year's Day",
"2020-01-27", "Australia Day",
"2020-03-02", "Labour Day",
"2020-04-10", "Good Friday",
"2020-04-13", "Easter Monday",
"2020-04-25", "ANZAC Day",
"2020-04-27", "ANZAC Day",
"2020-06-01", "Western Australia Day",
"2020-09-28", "Queen's Birthday",
"2020-12-25", "Christmas Day",
"2020-12-26", "Boxing Day",
"2020-12-28", "Boxing Day (Additional day)"
},
}
value_vars=['retail_and_recreation_percent_change_from_baseline',
'grocery_and_pharmacy_percent_change_from_baseline',
'parks_percent_change_from_baseline',
'transit_stations_percent_change_from_baseline',
'workplaces_percent_change_from_baseline',
'residential_percent_change_from_baseline']
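# names of the 7-day moving-average columns: drop the 29-character
# '_percent_change_from_baseline' suffix and append '_7days'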
mov_values = [ val[:-29]+'_7days' for val in value_vars]
states_initials ={
'Western Australia':'WA',
'South Australia':'SA',
'Tasmania':'TAS',
'Queensland':'QLD',
'New South Wales':'NSW',
'Victoria':'VIC',
'Australian Capital Territory':'ACT',
'Northern Territory':'NT'
}
| none | 1 | 1.514108 | 2 |
|
cycle_leds.py | akaplo/METARMap | 0 | 6631763 | import board
import neopixel
import time
import os
LED_COUNT = 50 # Number of LED pixels.
LED_PIN = board.D18 # GPIO pin connected to the pixels (18 is PCM).
LED_ORDER = neopixel.RGB # Strip type and colour ordering
LED_BRIGHTNESS = 0.5
pixels = neopixel.NeoPixel(LED_PIN, LED_COUNT, brightness = LED_BRIGHTNESS)
color_red = (255,0,0)
color_clear = (0,0,0)
pixels[0] = color_red
# turn them all on in groups of 10
for multiplier in [1, 2, 3, 4, 5]:
for idx in [0, 1, 2, 3,4,5,6,7,8]:
p = 10 * multiplier - 10 + idx
if (multiplier + idx != 1):
print("turning on " + str(p))
pixels[p] = color_red
time.sleep(1)
# now turn them all off
for idxi in [0, 1, 2, 3,4,5,6,7,8]:
if (multiplier + idxi != 1):
p = 10 * multiplier - 10 + idxi
print("turning off " + str(p))
pixels[p] = color_clear
pixels.deinit() | import board
import neopixel
import time
import os
LED_COUNT = 50 # Number of LED pixels.
LED_PIN = board.D18 # GPIO pin connected to the pixels (18 is PCM).
LED_ORDER = neopixel.RGB # Strip type and colour ordering
LED_BRIGHTNESS = 0.5
pixels = neopixel.NeoPixel(LED_PIN, LED_COUNT, brightness = LED_BRIGHTNESS)
color_red = (255,0,0)
color_clear = (0,0,0)
pixels[0] = color_red
# turn them all on in groups of 10
for multiplier in [1, 2, 3, 4, 5]:
for idx in [0, 1, 2, 3,4,5,6,7,8]:
p = 10 * multiplier - 10 + idx
if (multiplier + idx != 1):
print("turning on " + str(p))
pixels[p] = color_red
time.sleep(1)
# now turn them all off
for idxi in [0, 1, 2, 3,4,5,6,7,8]:
if (multiplier + idxi != 1):
p = 10 * multiplier - 10 + idxi
print("turning off " + str(p))
pixels[p] = color_clear
pixels.deinit() | en | 0.917117 | # Number of LED pixels. # GPIO pin connected to the pixels (18 is PCM). # Strip type and colour ordering # turn them all on in groups of 10 # now turn them all off | 3.362067 | 3 |
qiskit_experiments/library/characterization/analysis/local_readout_error_analysis.py | QuantumHardware/qiskit-experiments | 1 | 6631764 | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021, 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Analysis class to characterize local readout error
"""
from typing import List, Tuple
import numpy as np
import matplotlib.pyplot as plt
from qiskit.result import LocalReadoutMitigator
from qiskit.result import marginal_counts
from qiskit_experiments.framework import ExperimentData
from qiskit_experiments.framework.matplotlib import get_non_gui_ax
from qiskit_experiments.framework import BaseAnalysis, AnalysisResultData, Options
class LocalReadoutErrorAnalysis(BaseAnalysis):
r"""
Local readout error characterization analysis
# section: overview
This class generates the assignment matrices characterizing the
readout error for each of the given qubits from the experiment result,
and returns the resulting :class:`~qiskit.result.LocalReadoutMitigator`
Each such matrix is a :math:`2\times 2` matrix :math:`A`. Such that :math:`A_{y,x}`
is the probability to observe :math:`y` given the true outcome should be :math:`x`,
where :math:`x,y \in \left\{0,1\right\}` can be 0 and 1.
In the experiment, two circuits are constructed - one for 0 outcome for all
qubits and one for 1 outcome. From the observed results on the circuit, the
probability for each :math:`x,y` is determined, and :math:`A_{y,x}` is set accordingly.
Analysis Results:
* "Local Readout Mitigator": The :class:`~qiskit.result.LocalReadoutMitigator`.
Analysis Figures:
* (Optional) A figure of the assignment matrix.
Note: producing this figure scales exponentially with the number of qubits.
# section: reference
.. ref_arxiv:: 1 2006.14044
"""
@classmethod
def _default_options(cls) -> Options:
"""Return default analysis options.
Analysis Options:
plot (bool): Set ``True`` to create figure for fit result.
ax(AxesSubplot): Optional. A matplotlib axis object to draw.
"""
options = super()._default_options()
# since the plot size grows exponentially with the number of qubits, plotting is off by default
options.plot = False
options.ax = None
return options
def _run_analysis(
self, experiment_data: ExperimentData
) -> Tuple[List[AnalysisResultData], List["matplotlib.figure.Figure"]]:
data = experiment_data.data()
qubits = experiment_data.metadata["physical_qubits"]
matrices = self._generate_matrices(data)
result_mitigator = LocalReadoutMitigator(matrices, qubits=qubits)
analysis_results = [AnalysisResultData("Local Readout Mitigator", result_mitigator)]
if self.options.plot:
figure = assignment_matrix_visualization(
result_mitigator.assignment_matrix(), ax=self.options.ax
)
figures = [figure]
else:
figures = None
return analysis_results, figures
def _generate_matrices(self, data) -> List[np.array]:
num_qubits = len(data[0]["metadata"]["state_label"])
counts = [None, None]
for result in data:
for i in range(2):
if result["metadata"]["state_label"] == str(i) * num_qubits:
counts[i] = result["counts"]
matrices = []
for k in range(num_qubits):
matrix = np.zeros([2, 2], dtype=float)
marginalized_counts = []
for i in range(2):
marginalized_counts.append(marginal_counts(counts[i], [k]))
# matrix[i][j] is the probability of counting i for expected j
for i in range(2):
for j in range(2):
matrix[i][j] = marginalized_counts[j][str(i)] / sum(
marginalized_counts[j].values()
)
matrices.append(matrix)
return matrices
def assignment_matrix_visualization(assignment_matrix, ax=None):
"""Displays a visualization of the assignment matrix compared to the identity"""
if ax is None:
ax = get_non_gui_ax()
figure = ax.get_figure()
n = len(assignment_matrix)
diff = np.abs(assignment_matrix - np.eye(n))
im2 = ax.matshow(diff, cmap=plt.cm.Reds, vmin=0, vmax=0.2)
ax.set_yticks(np.arange(n))
ax.set_xticks(np.arange(n))
ax.set_yticklabels(n * [""])
ax.set_xticklabels(n * [""])
ax.set_title(r"$|A - I |$", fontsize=16)
ax.set_xlabel("Prepared State")
ax.xaxis.set_label_position("top")
ax.set_ylabel("Measured State")
figure.colorbar(im2, ax=ax)
return figure
| # This code is part of Qiskit.
#
# (C) Copyright IBM 2021, 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Analysis class to characterize local readout error
"""
from typing import List, Tuple
import numpy as np
import matplotlib.pyplot as plt
from qiskit.result import LocalReadoutMitigator
from qiskit.result import marginal_counts
from qiskit_experiments.framework import ExperimentData
from qiskit_experiments.framework.matplotlib import get_non_gui_ax
from qiskit_experiments.framework import BaseAnalysis, AnalysisResultData, Options
class LocalReadoutErrorAnalysis(BaseAnalysis):
r"""
Local readout error characterization analysis
# section: overview
This class generates the assignment matrices characterizing the
readout error for each of the given qubits from the experiment result,
and returns the resulting :class:`~qiskit.result.LocalReadoutMitigator`
Each such matrix is a :math:`2\times 2` matrix :math:`A`. Such that :math:`A_{y,x}`
is the probability to observe :math:`y` given the true outcome should be :math:`x`,
where :math:`x,y \in \left\{0,1\right\}` can be 0 and 1.
In the experiment, two circuits are constructed - one for 0 outcome for all
qubits and one for 1 outcome. From the observed results on the circuit, the
probability for each :math:`x,y` is determined, and :math:`A_{y,x}` is set accordingly.
Analysis Results:
* "Local Readout Mitigator": The :class:`~qiskit.result.LocalReadoutMitigator`.
Analysis Figures:
* (Optional) A figure of the assignment matrix.
Note: producing this figure scales exponentially with the number of qubits.
# section: reference
.. ref_arxiv:: 1 2006.14044
"""
@classmethod
def _default_options(cls) -> Options:
"""Return default analysis options.
Analysis Options:
plot (bool): Set ``True`` to create figure for fit result.
ax(AxesSubplot): Optional. A matplotlib axis object to draw.
"""
options = super()._default_options()
# since the plot size grows exponentially with the number of qubits, plotting is off by default
options.plot = False
options.ax = None
return options
def _run_analysis(
self, experiment_data: ExperimentData
) -> Tuple[List[AnalysisResultData], List["matplotlib.figure.Figure"]]:
data = experiment_data.data()
qubits = experiment_data.metadata["physical_qubits"]
matrices = self._generate_matrices(data)
result_mitigator = LocalReadoutMitigator(matrices, qubits=qubits)
analysis_results = [AnalysisResultData("Local Readout Mitigator", result_mitigator)]
if self.options.plot:
figure = assignment_matrix_visualization(
result_mitigator.assignment_matrix(), ax=self.options.ax
)
figures = [figure]
else:
figures = None
return analysis_results, figures
def _generate_matrices(self, data) -> List[np.array]:
num_qubits = len(data[0]["metadata"]["state_label"])
counts = [None, None]
for result in data:
for i in range(2):
if result["metadata"]["state_label"] == str(i) * num_qubits:
counts[i] = result["counts"]
matrices = []
for k in range(num_qubits):
matrix = np.zeros([2, 2], dtype=float)
marginalized_counts = []
for i in range(2):
marginalized_counts.append(marginal_counts(counts[i], [k]))
# matrix[i][j] is the probability of counting i for expected j
for i in range(2):
for j in range(2):
matrix[i][j] = marginalized_counts[j][str(i)] / sum(
marginalized_counts[j].values()
)
matrices.append(matrix)
return matrices
def assignment_matrix_visualization(assignment_matrix, ax=None):
"""Displays a visualization of the assignment matrix compared to the identity"""
if ax is None:
ax = get_non_gui_ax()
figure = ax.get_figure()
n = len(assignment_matrix)
diff = np.abs(assignment_matrix - np.eye(n))
im2 = ax.matshow(diff, cmap=plt.cm.Reds, vmin=0, vmax=0.2)
ax.set_yticks(np.arange(n))
ax.set_xticks(np.arange(n))
ax.set_yticklabels(n * [""])
ax.set_xticklabels(n * [""])
ax.set_title(r"$|A - I |$", fontsize=16)
ax.set_xlabel("Prepared State")
ax.xaxis.set_label_position("top")
ax.set_ylabel("Measured State")
figure.colorbar(im2, ax=ax)
return figure
| en | 0.786827 | # This code is part of Qiskit. # # (C) Copyright IBM 2021, 2022. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. Analysis class to characterize local readout error Local readout error characterization analysis # section: overview This class generates the assignment matrices characterizing the readout error for each of the given qubits from the experiment result, and returns the resulting :class:`~qiskit.result.LocalReadoutMitigator` Each such matrix is a :math:`2\times 2` matrix :math:`A`. Such that :math:`A_{y,x}` is the probability to observe :math:`y` given the true outcome should be :math:`x`, where :math:`x,y \in \left\{0,1\right\}` can be 0 and 1. In the experiment, two circuits are constructed - one for 0 outcome for all qubits and one for 1 outcome. From the observed results on the circuit, the probability for each :math:`x,y` is determined, and :math:`A_{y,x}` is set accordingly. Analysis Results: * "Local Readout Mitigator": The :class:`~qiskit.result.LocalReadoutMitigator`. Analysis Figures: * (Optional) A figure of the assignment matrix. Note: producing this figure scales exponentially with the number of qubits. # section: reference .. ref_arxiv:: 1 2006.14044 Return default analysis options. Analysis Options: plot (bool): Set ``True`` to create figure for fit result. ax(AxesSubplot): Optional. A matplotlib axis object to draw. # since the plot size grows exponentially with the number of qubits, plotting is off by default # matrix[i][j] is the probability of counting i for expected j Displays a visualization of the assignment matrix compared to the identity | 2.939578 | 3 |
06.EntryFeild/04.TextInput.py | sarincr/Python-App-Development-using-Kivy | 1 | 6631765 | <gh_stars>1-10
from kivy.app import App
from kivy.uix.textinput import TextInput
class SimpleApp(App):
def build(self):
t = TextInput(font_size=150)
return t
if __name__ == "__main__":
SimpleApp().run()
| from kivy.app import App
from kivy.uix.textinput import TextInput
class SimpleApp(App):
def build(self):
t = TextInput(font_size=150)
return t
if __name__ == "__main__":
SimpleApp().run() | none | 1 | 2.197716 | 2 |
|
src/tests/tests.py | mariiashybetska/currency_exchange | 0 | 6631766 | import pytest
import requests
from decimal import Decimal
from django.urls import reverse
from currency.tasks import _pb, _mono
from currency.models import Rate
def test_sanity():
assert 200 == 200
def test_index_page(client):
url = reverse('index')
response = client.get(url)
assert response.status_code == 200
def test_rates_not_auth(client):
url = reverse('api-currency:rates')
response = client.get(url)
assert response.status_code == 401
resp_j = response.json()
assert len(resp_j) == 1
assert resp_j['detail'] == 'Authentication credentials were not provided.'
def test_rates_auth(api_client, user):
url = reverse('api-currency:rates')
response = api_client.get(url)
assert response.status_code == 401
api_client.login(user.username, user.raw_password)
response = api_client.get(url)
assert response.status_code == 200
def test_get_rates(api_client, user):
url = reverse('api-currency:rates')
api_client.login(user.email, user.raw_password)
response = api_client.get(url)
assert response.status_code == 200
def test_send_email():
from django.core import mail
from account.tasks import send_activation_code_async
from uuid import uuid4
emails = mail.outbox
print('EMAILS:', emails)
send_activation_code_async.delay(1, str(uuid4()))
emails = mail.outbox
assert len(emails) == 1
email = mail.outbox[0]
assert email.subject == 'Your activation code'
# homework
class Response:
pass
def test_task_pb(mocker):
def mock():
response = Response()
response.json = lambda: [
{"ccy": "USD", "base_ccy": "UAH", "buy": "27.10", "sale": "27.65"},
{"ccy": "EUR", "base_ccy": "UAH", "buy": "29.20", "sale": "29.86"},
{"ccy": "RUR", "base_ccy": "UAH", "buy": "0.34", "sale": "0.36"},
{"ccy": "BTC", "base_ccy": "USD", "buy": "6464.6154", "sale": "7145.1012"},
]
return response
requests_get_patcher = mocker.patch('requests.get')
requests_get_patcher.return_value = mock()
Rate.objects.all().delete()
_pb()
rate = Rate.objects.all()
assert len(rate) == 2
assert rate[0].currency == 1
assert rate[0].buy == Decimal('27.20')
assert rate[0].sale == Decimal('27.62')
assert rate[0].source == 1
assert rate[1].currency == 2
assert rate[1].buy == Decimal('29.30')
assert rate[1].sale == Decimal('29.85')
assert rate[1].source == 1
Rate.objects.all().delete()
def test_task_mono(mocker):
def mock():
response = Response()
response.json = lambda: [
{"currencyCodeA": 840, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 27.35, "rateSell": 27.62},
{"currencyCodeA": 978, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 29.45, "rateSell": 29.83},
{"currencyCodeA": 643, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 0.315, "rateSell": 0.36},
{"currencyCodeA": 978, "currencyCodeB": 840, "date": 1585948209, "rateBuy": 1.0863, "rateSell": 1.11},
]
return response
requests_get_patcher = mocker.patch('requests.get')
requests_get_patcher.return_value = mock()
_mono()
rate = Rate.objects.all()
assert len(rate) == 2
assert rate[0].currency == 1
assert rate[0].buy == Decimal('27.35')
assert rate[0].sale == Decimal('27.62')
assert rate[0].source == 2
assert rate[1].currency == 2
assert rate[1].buy == Decimal('29.45')
assert rate[1].sale == Decimal('29.83')
assert rate[1].source == 2
Rate.objects.all().delete() | import pytest
import requests
from decimal import Decimal
from django.urls import reverse
from currency.tasks import _pb, _mono
from currency.models import Rate
def test_sanity():
assert 200 == 200
def test_index_page(client):
url = reverse('index')
response = client.get(url)
assert response.status_code == 200
def test_rates_not_auth(client):
url = reverse('api-currency:rates')
response = client.get(url)
assert response.status_code == 401
resp_j = response.json()
assert len(resp_j) == 1
assert resp_j['detail'] == 'Authentication credentials were not provided.'
def test_rates_auth(api_client, user):
url = reverse('api-currency:rates')
response = api_client.get(url)
assert response.status_code == 401
api_client.login(user.username, user.raw_password)
response = api_client.get(url)
assert response.status_code == 200
def test_get_rates(api_client, user):
url = reverse('api-currency:rates')
api_client.login(user.email, user.raw_password)
response = api_client.get(url)
assert response.status_code == 200
def test_send_email():
from django.core import mail
from account.tasks import send_activation_code_async
from uuid import uuid4
emails = mail.outbox
print('EMAILS:', emails)
send_activation_code_async.delay(1, str(uuid4()))
emails = mail.outbox
assert len(emails) == 1
email = mail.outbox[0]
assert email.subject == 'Your activation code'
# homework
class Response:
pass
def test_task_pb(mocker):
def mock():
response = Response()
response.json = lambda: [
{"ccy": "USD", "base_ccy": "UAH", "buy": "27.10", "sale": "27.65"},
{"ccy": "EUR", "base_ccy": "UAH", "buy": "29.20", "sale": "29.86"},
{"ccy": "RUR", "base_ccy": "UAH", "buy": "0.34", "sale": "0.36"},
{"ccy": "BTC", "base_ccy": "USD", "buy": "6464.6154", "sale": "7145.1012"},
]
return response
requests_get_patcher = mocker.patch('requests.get')
requests_get_patcher.return_value = mock()
Rate.objects.all().delete()
_pb()
rate = Rate.objects.all()
assert len(rate) == 2
assert rate[0].currency == 1
assert rate[0].buy == Decimal('27.20')
assert rate[0].sale == Decimal('27.62')
assert rate[0].source == 1
assert rate[1].currency == 2
assert rate[1].buy == Decimal('29.30')
assert rate[1].sale == Decimal('29.85')
assert rate[1].source == 1
Rate.objects.all().delete()
def test_task_mono(mocker):
def mock():
response = Response()
response.json = lambda: [
{"currencyCodeA": 840, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 27.35, "rateSell": 27.62},
{"currencyCodeA": 978, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 29.45, "rateSell": 29.83},
{"currencyCodeA": 643, "currencyCodeB": 980, "date": 1585948209, "rateBuy": 0.315, "rateSell": 0.36},
{"currencyCodeA": 978, "currencyCodeB": 840, "date": 1585948209, "rateBuy": 1.0863, "rateSell": 1.11},
]
return response
requests_get_patcher = mocker.patch('requests.get')
requests_get_patcher.return_value = mock()
_mono()
rate = Rate.objects.all()
assert len(rate) == 2
assert rate[0].currency == 1
assert rate[0].buy == Decimal('27.35')
assert rate[0].sale == Decimal('27.62')
assert rate[0].source == 2
assert rate[1].currency == 2
assert rate[1].buy == Decimal('29.45')
assert rate[1].sale == Decimal('29.83')
assert rate[1].source == 2
Rate.objects.all().delete() | en | 0.970571 | # homework | 2.17094 | 2 |
puntosvista/migrations/0005_auto_20210913_1533.py | ErickMurillo/cantera | 0 | 6631767 | <gh_stars>0
# Generated by Django 2.1.7 on 2021-09-13 15:33
from django.db import migrations
import sorl.thumbnail.fields
class Migration(migrations.Migration):
dependencies = [
('puntosvista', '0004_auto_20190923_0906'),
]
operations = [
migrations.AlterField(
model_name='puntos',
name='foto',
field=sorl.thumbnail.fields.ImageField(help_text='830x620', upload_to='puntos-vista/', verbose_name='Foto'),
),
]
| # Generated by Django 2.1.7 on 2021-09-13 15:33
from django.db import migrations
import sorl.thumbnail.fields
class Migration(migrations.Migration):
dependencies = [
('puntosvista', '0004_auto_20190923_0906'),
]
operations = [
migrations.AlterField(
model_name='puntos',
name='foto',
field=sorl.thumbnail.fields.ImageField(help_text='830x620', upload_to='puntos-vista/', verbose_name='Foto'),
),
] | en | 0.781801 | # Generated by Django 2.1.7 on 2021-09-13 15:33 | 1.334535 | 1 |
solutions/2020/kws/day_12.py | kws/AdventOfCode | 1 | 6631768 | #!/usr/bin/env python
import argparse
import math
from collections import namedtuple
from time import sleep
Coordinates = namedtuple('Coordinates', 'x y')
class Ship:
def __init__(self, x, y, heading: int):
self.x = x
self.y = y
self.heading = heading
def __repr__(self):
x = "E" if self.x >= 0 else "W"
y = "S" if self.y >= 0 else "N"
return f"{abs(self.x)}{x} {abs(self.y)}{y} {self.heading}°"
class Navigator:
@staticmethod
def move(ship, instruction):
method = instruction[0]
value = int(instruction[1:])
function = getattr(Navigator, method)
function(ship, value)
@staticmethod
def N(ship: Ship, speed):
ship.y = ship.y - speed
@staticmethod
def S(ship: Ship, speed):
ship.y = ship.y + speed
@staticmethod
def E(ship: Ship, speed):
ship.x = ship.x + speed
@staticmethod
def W(ship: Ship, speed):
ship.x = ship.x - speed
@staticmethod
def L(ship: Ship, degrees):
ship.heading = (ship.heading + degrees) % 360
@staticmethod
def R(ship: Ship, degrees):
ship.heading = (ship.heading - degrees) % 360
@staticmethod
def F(ship: Ship, speed):
offset_x = math.sin(math.radians(ship.heading))
offset_y = math.cos(math.radians(ship.heading))
ship.x = round(ship.x + speed * offset_x)
ship.y = round(ship.y + speed * offset_y)
class WayPointNavigator:
@staticmethod
def move(ship, waypoint, instruction):
method = instruction[0]
value = int(instruction[1:])
function = getattr(WayPointNavigator, method)
function(ship, waypoint, value)
@staticmethod
def rotate(px, py, angle):
dx = math.cos(angle) * px - math.sin(angle) * py
dy = math.sin(angle) * px + math.cos(angle) * py
return round(dx), round(dy)
@staticmethod
def N(ship: Ship, waypoint: Ship, value):
waypoint.y = waypoint.y - value
@staticmethod
def S(ship: Ship, waypoint: Ship, value):
waypoint.y = waypoint.y + value
@staticmethod
def E(ship: Ship, waypoint: Ship, value):
waypoint.x = waypoint.x + value
@staticmethod
def W(ship: Ship, waypoint: Ship, value):
waypoint.x = waypoint.x - value
@staticmethod
def L(ship: Ship, waypoint: Ship, degrees):
waypoint.x, waypoint.y = WayPointNavigator.rotate(waypoint.x, waypoint.y, math.radians(-degrees))
@staticmethod
def R(ship: Ship, waypoint: Ship, degrees):
waypoint.x, waypoint.y = WayPointNavigator.rotate(waypoint.x, waypoint.y, math.radians(degrees))
@staticmethod
def F(ship: Ship, waypoint: Ship, speed):
ship.x = ship.x + waypoint.x * speed
ship.y = ship.y + waypoint.y * speed
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Day 12 of Advent of Code 2020')
parser.add_argument('file', metavar='filename', type=argparse.FileType('rt'),
help='filename to your personal inputs')
parser.add_argument('--debug', '-d', action='store_true', help='Print debug output of maps')
parser.add_argument('--turtle', action='store_true', help='Add a turtle')
args = parser.parse_args()
with args.file as FILE:
input_lines = FILE.readlines()
input_lines = [i.strip() for i in input_lines if len(i.strip()) > 0]
ShipShape = namedtuple("ShipShape", "instruction x y heading text")
ship = Ship(0, 0, 90)
states = []
for i in input_lines:
Navigator.move(ship, i)
states.append(ShipShape(i, ship.x, ship.y, ship.heading, repr(ship)))
max_x = max([0] + [ship.x for ship in states])
min_x = min([0] + [ship.x for ship in states])
max_y = max([0] + [ship.y for ship in states])
min_y = min([0] + [ship.y for ship in states])
x_scale = max_x - min_x
y_scale = max_y - min_y
if args.turtle:
import turtle
screen = turtle.Screen()
turtle.tracer(3)
screen.setworldcoordinates(min_x - x_scale/10, min_y - y_scale/10, max_x + x_scale/10, max_y + y_scale/10)
turtle_ship = turtle.Turtle()
for ship in states:
if args.turtle:
turtle_ship.setheading(90-ship.heading)
turtle_ship.goto(ship.x, ship.y)
turtle.update()
if args.debug:
print(ship.instruction, ship.text)
print(f"At the end of part 1 the ship is at {ship} with a manhattan distance of {abs(ship.x) + abs(ship.y)}")
if args.turtle:
sleep(2)
states = []
waypoints = []
ship = Ship(0, 0, 0)
waypoint = Ship(10, -1, 0)
for i in input_lines:
WayPointNavigator.move(ship, waypoint, i)
states.append(ShipShape(i, ship.x, ship.y, ship.heading, repr(ship)))
waypoints.append(ShipShape(i, waypoint.x, waypoint.y, waypoint.heading, repr(waypoint)))
max_x = max([0] + [ship.x for ship in states] + [ship.x for ship in waypoints])
min_x = min([0] + [ship.x for ship in states] + [ship.x for ship in waypoints])
max_y = max([0] + [ship.y for ship in states] + [ship.y for ship in waypoints])
min_y = min([0] + [ship.y for ship in states] + [ship.y for ship in waypoints])
x_scale = max_x - min_x
y_scale = max_y - min_y
if args.turtle:
screen.reset()
screen.setworldcoordinates(min_x - x_scale/10, min_y - y_scale/10, max_x + x_scale/10, max_y + y_scale/10)
turtle_ship.shape("circle")
turtle_ship.turtlesize(.1, .1)
turtle_waypoint = turtle.Turtle()
turtle_waypoint.shape("square")
turtle_waypoint.turtlesize(.1, .1)
turtle_waypoint.color("red")
for ix, ship in enumerate(states):
waypoint = waypoints[ix]
if args.turtle:
turtle_ship.setheading(90 - ship.heading)
turtle_waypoint.goto(ship.x + waypoint.x, ship.y + waypoint.y)
turtle_ship.goto(ship.x, ship.y)
turtle.update()
if args.debug:
print(ship.instruction, ship.text, waypoint.text)
print(f"At the end of part 2 the ship is at {ship} with a manhattan distance of {abs(ship.x) + abs(ship.y)}")
if args.turtle:
input("Press any key to continue")
| #!/usr/bin/env python
import argparse
import math
from collections import namedtuple
from time import sleep
Coordinates = namedtuple('Coordinates', 'x y')
class Ship:
def __init__(self, x, y, heading: int):
self.x = x
self.y = y
self.heading = heading
def __repr__(self):
x = "E" if self.x >= 0 else "W"
y = "S" if self.y >= 0 else "N"
return f"{abs(self.x)}{x} {abs(self.y)}{y} {self.heading}°"
class Navigator:
@staticmethod
def move(ship, instruction):
method = instruction[0]
value = int(instruction[1:])
function = getattr(Navigator, method)
function(ship, value)
@staticmethod
def N(ship: Ship, speed):
ship.y = ship.y - speed
@staticmethod
def S(ship: Ship, speed):
ship.y = ship.y + speed
@staticmethod
def E(ship: Ship, speed):
ship.x = ship.x + speed
@staticmethod
def W(ship: Ship, speed):
ship.x = ship.x - speed
@staticmethod
def L(ship: Ship, degrees):
ship.heading = (ship.heading + degrees) % 360
@staticmethod
def R(ship: Ship, degrees):
ship.heading = (ship.heading - degrees) % 360
@staticmethod
def F(ship: Ship, speed):
offset_x = math.sin(math.radians(ship.heading))
offset_y = math.cos(math.radians(ship.heading))
ship.x = round(ship.x + speed * offset_x)
ship.y = round(ship.y + speed * offset_y)
class WayPointNavigator:
@staticmethod
def move(ship, waypoint, instruction):
method = instruction[0]
value = int(instruction[1:])
function = getattr(WayPointNavigator, method)
function(ship, waypoint, value)
@staticmethod
def rotate(px, py, angle):
dx = math.cos(angle) * px - math.sin(angle) * py
dy = math.sin(angle) * px + math.cos(angle) * py
return round(dx), round(dy)
@staticmethod
def N(ship: Ship, waypoint: Ship, value):
waypoint.y = waypoint.y - value
@staticmethod
def S(ship: Ship, waypoint: Ship, value):
waypoint.y = waypoint.y + value
@staticmethod
def E(ship: Ship, waypoint: Ship, value):
waypoint.x = waypoint.x + value
@staticmethod
def W(ship: Ship, waypoint: Ship, value):
waypoint.x = waypoint.x - value
@staticmethod
def L(ship: Ship, waypoint: Ship, degrees):
waypoint.x, waypoint.y = WayPointNavigator.rotate(waypoint.x, waypoint.y, math.radians(-degrees))
@staticmethod
def R(ship: Ship, waypoint: Ship, degrees):
waypoint.x, waypoint.y = WayPointNavigator.rotate(waypoint.x, waypoint.y, math.radians(degrees))
@staticmethod
def F(ship: Ship, waypoint: Ship, speed):
ship.x = ship.x + waypoint.x * speed
ship.y = ship.y + waypoint.y * speed
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Day 12 of Advent of Code 2020')
parser.add_argument('file', metavar='filename', type=argparse.FileType('rt'),
help='filename to your personal inputs')
parser.add_argument('--debug', '-d', action='store_true', help='Print debug output of maps')
parser.add_argument('--turtle', action='store_true', help='Add a turtle')
args = parser.parse_args()
with args.file as FILE:
input_lines = FILE.readlines()
input_lines = [i.strip() for i in input_lines if len(i.strip()) > 0]
ShipShape = namedtuple("ShipShape", "instruction x y heading text")
ship = Ship(0, 0, 90)
states = []
for i in input_lines:
Navigator.move(ship, i)
states.append(ShipShape(i, ship.x, ship.y, ship.heading, repr(ship)))
max_x = max([0] + [ship.x for ship in states])
min_x = min([0] + [ship.x for ship in states])
max_y = max([0] + [ship.y for ship in states])
min_y = min([0] + [ship.y for ship in states])
x_scale = max_x - min_x
y_scale = max_y - min_y
if args.turtle:
import turtle
screen = turtle.Screen()
turtle.tracer(3)
screen.setworldcoordinates(min_x - x_scale/10, min_y - y_scale/10, max_x + x_scale/10, max_y + y_scale/10)
turtle_ship = turtle.Turtle()
for ship in states:
if args.turtle:
turtle_ship.setheading(90-ship.heading)
turtle_ship.goto(ship.x, ship.y)
turtle.update()
if args.debug:
print(ship.instruction, ship.text)
print(f"At the end of part 1 the ship is at {ship} with a manhattan distance of {abs(ship.x) + abs(ship.y)}")
if args.turtle:
sleep(2)
states = []
waypoints = []
ship = Ship(0, 0, 0)
waypoint = Ship(10, -1, 0)
for i in input_lines:
WayPointNavigator.move(ship, waypoint, i)
states.append(ShipShape(i, ship.x, ship.y, ship.heading, repr(ship)))
waypoints.append(ShipShape(i, waypoint.x, waypoint.y, waypoint.heading, repr(waypoint)))
max_x = max([0] + [ship.x for ship in states] + [ship.x for ship in waypoints])
min_x = min([0] + [ship.x for ship in states] + [ship.x for ship in waypoints])
max_y = max([0] + [ship.y for ship in states] + [ship.y for ship in waypoints])
min_y = min([0] + [ship.y for ship in states] + [ship.y for ship in waypoints])
x_scale = max_x - min_x
y_scale = max_y - min_y
if args.turtle:
screen.reset()
screen.setworldcoordinates(min_x - x_scale/10, min_y - y_scale/10, max_x + x_scale/10, max_y + y_scale/10)
turtle_ship.shape("circle")
turtle_ship.turtlesize(.1, .1)
turtle_waypoint = turtle.Turtle()
turtle_waypoint.shape("square")
turtle_waypoint.turtlesize(.1, .1)
turtle_waypoint.color("red")
for ix, ship in enumerate(states):
waypoint = waypoints[ix]
if args.turtle:
turtle_ship.setheading(90 - ship.heading)
turtle_waypoint.goto(ship.x + waypoint.x, ship.y + waypoint.y)
turtle_ship.goto(ship.x, ship.y)
turtle.update()
if args.debug:
print(ship.instruction, ship.text, waypoint.text)
print(f"At the end of part 2 the ship is at {ship} with a manhattan distance of {abs(ship.x) + abs(ship.y)}")
if args.turtle:
input("Press any key to continue")
| ru | 0.26433 | #!/usr/bin/env python | 3.758711 | 4 |
CNN/model.py | woodyZootopia/CuneiformSeg | 2 | 6631769 | from keras import backend as K
from keras.engine.topology import Input, Container
from keras.models import Model
from keras.layers import *
from keras.optimizers import *
K.set_image_dim_ordering("tf")
print("image_dim_ordering:tf")
def get_net(size, classes, session=None):
inputs = Input(shape=(size, size, 3))
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(inputs)
# out = GlobalAveragePooling2D()(out)
midout = Flatten()(midout)
midout = Dense(100, activation="relu",
kernel_initializer="he_normal")(midout)
out = Dense(classes, activation="sigmoid")(midout)
if session is not None:
K.set_session(session)
model = Model(inputs=inputs, outputs=out)
model.compile(Adam(lr=1.5e-4), loss="binary_crossentropy",
metrics=["accuracy"])
return(model)
def get_net2(size, classes, session=None):
inputs = Input(shape=(size, size, 3))
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(inputs)
midout = BatchNormalization()(midout)
midout = MaxPooling2D(pool_size=(2, 2))(midout)
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(midout)
midout = BatchNormalization()(midout)
midout = MaxPooling2D(pool_size=(2, 2))(midout)
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(midout)
midout = Flatten()(midout)
midout = Dense(100, activation="relu",
kernel_initializer="he_normal")(midout)
out = Dense(classes, activation="sigmoid")(midout)
if session is not None:
K.set_session(session)
model = Model(inputs=inputs, outputs=out)
model.compile(Adam(lr=1.5e-4), loss="binary_crossentropy",
metrics=["accuracy"])
return(model)
| from keras import backend as K
from keras.engine.topology import Input, Container
from keras.models import Model
from keras.layers import *
from keras.optimizers import *
K.set_image_dim_ordering("tf")
print("image_dim_ordering:tf")
def get_net(size, classes, session=None):
inputs = Input(shape=(size, size, 3))
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(inputs)
# out = GlobalAveragePooling2D()(out)
midout = Flatten()(midout)
midout = Dense(100, activation="relu",
kernel_initializer="he_normal")(midout)
out = Dense(classes, activation="sigmoid")(midout)
if session is not None:
K.set_session(session)
model = Model(inputs=inputs, outputs=out)
model.compile(Adam(lr=1.5e-4), loss="binary_crossentropy",
metrics=["accuracy"])
return(model)
def get_net2(size, classes, session=None):
inputs = Input(shape=(size, size, 3))
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(inputs)
midout = BatchNormalization()(midout)
midout = MaxPooling2D(pool_size=(2, 2))(midout)
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(midout)
midout = BatchNormalization()(midout)
midout = MaxPooling2D(pool_size=(2, 2))(midout)
midout = Conv2D(4, 3, activation="relu",
kernel_initializer="he_normal")(midout)
midout = Flatten()(midout)
midout = Dense(100, activation="relu",
kernel_initializer="he_normal")(midout)
out = Dense(classes, activation="sigmoid")(midout)
if session is not None:
K.set_session(session)
model = Model(inputs=inputs, outputs=out)
model.compile(Adam(lr=1.5e-4), loss="binary_crossentropy",
metrics=["accuracy"])
return(model)
| en | 0.427973 | # out = GlobalAveragePooling2D()(out) | 2.891778 | 3 |
owlbear/request.py | gsmcwhirter/owlbear | 0 | 6631770 | # -*- coding: utf-8 -*-
"""Classes wrapping ASGI requests in a nicer interface"""
import http.cookies
import re
from typing import Tuple
import urllib.parse
class FormDataError(Exception):
"""Represents an error handling form data"""
pass
class RequestData:
"""Simple object container for attaching data to a request"""
pass
# parts from https://github.com/defnull/multipart/blob/master/multipart.py
# MIT license Copyright (c) 2010, <NAME>
_special = re.escape('()<>@,;:\\"/[]?={} \t')
_re_special = re.compile('[%s]' % _special)
_qstr = '"(?:\\\\.|[^"])*"'
_value = '(?:[^%s]+|%s)' % (_special, _qstr)
_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value)
_re_option = re.compile(_option) # key=value part of an Content-Type like header
class Request:
"""Class to wrap an ASGI request"""
__slots__ = (
'app', 'raw_request', 'data', '_headers',
'_body', '_body_channel', '_query_args',
'_form_values', '_form_files', '_form_parsed',
'_cookies', )
def __init__(self, app: 'owlbear.app.Owlbear', raw_request: dict, body_channel=None):
self.app = app
self.raw_request = raw_request
self.data = RequestData()
self._headers: dict = None
self._body_channel = body_channel
self._body = None
self._query_args = None
self._form_values = None
self._form_files = None
self._form_parsed = False
self._cookies = None
def __str__(self):
return f"{self.method} {self.path}"
@property
def path(self) -> str:
"""Return the request uri_path"""
return self.raw_request.get('path')
@property
def query_string(self) -> str:
"""Return the request query string"""
return self.raw_request.get('query_string').decode()
@staticmethod
def _fix_query_args(query_args):
for key in query_args.keys():
if not key.endswith("[]"):
if len(query_args[key]) == 1:
query_args[key] = query_args[key][0]
elif len(query_args[key]) == 0:
query_args[key] = ''
@property
def query_args(self) -> dict:
"""Return the parsed query string args"""
if self._query_args is None:
if self.query_string is not None:
self._query_args = urllib.parse.parse_qs(self.query_string, keep_blank_values=True)
else:
self._query_args = {}
self._fix_query_args(self._query_args)
return self._query_args
@property
def cookies(self) -> dict:
"""Return cookie values"""
if self._cookies is None:
self._cookies = {}
cookie_header = self.headers.get('cookie')
if cookie_header:
cookie_parser = http.cookies.SimpleCookie(cookie_header)
for key, morsel in cookie_parser.items():
self._cookies[key] = morsel.value
return self._cookies
@staticmethod
def _header_unquote(val, filename=False):
if val[0] == val[-1] == '"':
val = val[1:-1]
if val[1:3] == ':\\' or val[:2] == '\\\\':
val = val.split('\\')[-1] # fix ie6 bug: full path --> filename
return val.replace('\\\\', '\\').replace('\\"', '"')
return val
@classmethod
def _parse_options_header(cls, header: str, options=None) -> Tuple[str, dict]:
if ';' not in header:
return header.lower().strip(), {}
ctype, tail = header.split(';', 1)
options = options or {}
for match in _re_option.finditer(tail):
key = match.group(1).lower()
value = cls._header_unquote(match.group(2), key=='filename')
options[key] = value
return ctype, options
async def parse_form(self, *, charset='utf8'):
if self._form_parsed:
return
# TODO: files
form_data, files = {}, {}
if self.method not in ('POST', 'PUT'):
raise FormDataError("Request method other than POST or PUT.")
try:
content_length = int(self.headers.get('content-length'))
except TypeError:
content_length = -1
content_type = self.headers.get('content-type', '')
if not content_type:
raise FormDataError("Missing Content-Type header.")
content_type, options = self._parse_options_header(content_type)
charset = options.get('charset', charset)
if content_type == 'multipart/form-data':
pass
# boundary = options.get('boundary', '')
# if not boundary:
# raise MultipartError("No boundary for multipart/form-data.")
# for part in MultipartParser(stream, boundary, content_length, **kw):
# if part.filename or not part.is_buffered():
# files[part.name] = part
# else: # TODO: Big form-fields are in the files dict. really?
# forms[part.name] = part.value
elif content_type in ('application/x-www-form-urlencoded',
'application/x-url-encoded'):
form_data_raw = await self.read_body(encoding=charset)
form_data = urllib.parse.parse_qs(form_data_raw, keep_blank_values=True)
self._fix_query_args(form_data)
else:
raise FormDataError("Unsupported content type.")
self._form_values, self._form_files = form_data, files
self._form_parsed = True
@property
def form(self) -> dict:
"""Return the parsed form data, if any"""
if self._form_values is None:
raise FormDataError("You must call request.parse_form first")
return self._form_values
@property
def host(self) -> str:
"""Return the request host"""
return self.headers.get('host')
@property
def scheme(self) -> str:
"""Return the request scheme"""
return self.raw_request.get('scheme')
@property
def method(self) -> str:
"""Return the request verb"""
return self.raw_request.get('method').upper()
@property
def headers(self) -> dict:
"""Return the request headers"""
if self._headers is None:
self._headers = {}
for header_name, header_val in self.raw_request.get('headers', []):
header_name = header_name.decode('ascii').lower()
header_val = header_val.decode('ascii')
self._headers[header_name] = header_val
return self._headers
async def read_body(self, encoding=None):
"""Read the request body, if there is one"""
if self._body is None:
self._body = self.raw_request.get('body', b'')
if self._body_channel:
while True:
chunk = await self._body_channel.receive()
self._body += chunk['content']
if not chunk.get('more_content'):
break
if encoding:
return self._body.decode(encoding)
else:
return self._body
| # -*- coding: utf-8 -*-
"""Classes wrapping ASGI requests in a nicer interface"""
import http.cookies
import re
from typing import Tuple
import urllib.parse
class FormDataError(Exception):
"""Represents an error handling form data"""
pass
class RequestData:
"""Simple object container for attaching data to a request"""
pass
# parts from https://github.com/defnull/multipart/blob/master/multipart.py
# MIT license Copyright (c) 2010, <NAME>
_special = re.escape('()<>@,;:\\"/[]?={} \t')
_re_special = re.compile('[%s]' % _special)
_qstr = '"(?:\\\\.|[^"])*"'
_value = '(?:[^%s]+|%s)' % (_special, _qstr)
_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value)
_re_option = re.compile(_option) # key=value part of an Content-Type like header
class Request:
"""Class to wrap an ASGI request"""
__slots__ = (
'app', 'raw_request', 'data', '_headers',
'_body', '_body_channel', '_query_args',
'_form_values', '_form_files', '_form_parsed',
'_cookies', )
def __init__(self, app: 'owlbear.app.Owlbear', raw_request: dict, body_channel=None):
self.app = app
self.raw_request = raw_request
self.data = RequestData()
self._headers: dict = None
self._body_channel = body_channel
self._body = None
self._query_args = None
self._form_values = None
self._form_files = None
self._form_parsed = False
self._cookies = None
def __str__(self):
return f"{self.method} {self.path}"
@property
def path(self) -> str:
"""Return the request uri_path"""
return self.raw_request.get('path')
@property
def query_string(self) -> str:
"""Return the request query string"""
return self.raw_request.get('query_string').decode()
@staticmethod
def _fix_query_args(query_args):
for key in query_args.keys():
if not key.endswith("[]"):
if len(query_args[key]) == 1:
query_args[key] = query_args[key][0]
elif len(query_args[key]) == 0:
query_args[key] = ''
@property
def query_args(self) -> dict:
"""Return the parsed query string args"""
if self._query_args is None:
if self.query_string is not None:
self._query_args = urllib.parse.parse_qs(self.query_string, keep_blank_values=True)
else:
self._query_args = {}
self._fix_query_args(self._query_args)
return self._query_args
@property
def cookies(self) -> dict:
"""Return cookie values"""
if self._cookies is None:
self._cookies = {}
cookie_header = self.headers.get('cookie')
if cookie_header:
cookie_parser = http.cookies.SimpleCookie(cookie_header)
for key, morsel in cookie_parser.items():
self._cookies[key] = morsel.value
return self._cookies
@staticmethod
def _header_unquote(val, filename=False):
if val[0] == val[-1] == '"':
val = val[1:-1]
if val[1:3] == ':\\' or val[:2] == '\\\\':
val = val.split('\\')[-1] # fix ie6 bug: full path --> filename
return val.replace('\\\\', '\\').replace('\\"', '"')
return val
@classmethod
def _parse_options_header(cls, header: str, options=None) -> Tuple[str, dict]:
if ';' not in header:
return header.lower().strip(), {}
ctype, tail = header.split(';', 1)
options = options or {}
for match in _re_option.finditer(tail):
key = match.group(1).lower()
value = cls._header_unquote(match.group(2), key=='filename')
options[key] = value
return ctype, options
async def parse_form(self, *, charset='utf8'):
if self._form_parsed:
return
# TODO: files
form_data, files = {}, {}
if self.method not in ('POST', 'PUT'):
raise FormDataError("Request method other than POST or PUT.")
try:
content_length = int(self.headers.get('content-length'))
except TypeError:
content_length = -1
content_type = self.headers.get('content-type', '')
if not content_type:
raise FormDataError("Missing Content-Type header.")
content_type, options = self._parse_options_header(content_type)
charset = options.get('charset', charset)
if content_type == 'multipart/form-data':
pass
# boundary = options.get('boundary', '')
# if not boundary:
# raise MultipartError("No boundary for multipart/form-data.")
# for part in MultipartParser(stream, boundary, content_length, **kw):
# if part.filename or not part.is_buffered():
# files[part.name] = part
# else: # TODO: Big form-fields are in the files dict. really?
# forms[part.name] = part.value
elif content_type in ('application/x-www-form-urlencoded',
'application/x-url-encoded'):
form_data_raw = await self.read_body(encoding=charset)
form_data = urllib.parse.parse_qs(form_data_raw, keep_blank_values=True)
self._fix_query_args(form_data)
else:
raise FormDataError("Unsupported content type.")
self._form_values, self._form_files = form_data, files
self._form_parsed = True
@property
def form(self) -> dict:
"""Return the parsed form data, if any"""
if self._form_values is None:
raise FormDataError("You must call request.parse_form first")
return self._form_values
@property
def host(self) -> str:
"""Return the request host"""
return self.headers.get('host')
@property
def scheme(self) -> str:
"""Return the request scheme"""
return self.raw_request.get('scheme')
@property
def method(self) -> str:
"""Return the request verb"""
return self.raw_request.get('method').upper()
@property
def headers(self) -> dict:
"""Return the request headers"""
if self._headers is None:
self._headers = {}
for header_name, header_val in self.raw_request.get('headers', []):
header_name = header_name.decode('ascii').lower()
header_val = header_val.decode('ascii')
self._headers[header_name] = header_val
return self._headers
async def read_body(self, encoding=None):
"""Read the request body, if there is one"""
if self._body is None:
self._body = self.raw_request.get('body', b'')
if self._body_channel:
while True:
chunk = await self._body_channel.receive()
self._body += chunk['content']
if not chunk.get('more_content'):
break
if encoding:
return self._body.decode(encoding)
else:
return self._body
| en | 0.54917 | # -*- coding: utf-8 -*- Classes wrapping ASGI requests in a nicer interface Represents an error handling form data Simple object container for attaching data to a request # parts from https://github.com/defnull/multipart/blob/master/multipart.py # MIT license Copyright (c) 2010, <NAME> # key=value part of an Content-Type like header Class to wrap an ASGI request Return the request uri_path Return the request query string Return the parsed query string args Return cookie values # fix ie6 bug: full path --> filename # TODO: files # boundary = options.get('boundary', '') # if not boundary: # raise MultipartError("No boundary for multipart/form-data.") # for part in MultipartParser(stream, boundary, content_length, **kw): # if part.filename or not part.is_buffered(): # files[part.name] = part # else: # TODO: Big form-fields are in the files dict. really? # forms[part.name] = part.value Return the parsed form data, if any Return the request host Return the request scheme Return the request verb Return the request headers Read the request body, if there is one | 2.832234 | 3 |
pygmmis.py | pmelchior/iemgmm | 0 | 6631771 | from __future__ import division
import numpy as np
import scipy.special, scipy.stats
import ctypes
import logging
logger = logging.getLogger("pygmmis")
# set up multiprocessing
import multiprocessing
import parmap
def createShared(a, dtype=ctypes.c_double):
"""Create a shared array to be used for multiprocessing's processes.
Taken from http://stackoverflow.com/questions/5549190/
Works only for float, double, int, long types (e.g. no bool).
Args:
        a: numpy array, arbitrary shape
Returns:
numpy array whose container is a multiprocessing.Array
"""
shared_array_base = multiprocessing.Array(dtype, a.size)
shared_array = np.ctypeslib.as_array(shared_array_base.get_obj())
shared_array[:] = a.flatten()
shared_array = shared_array.reshape(a.shape)
return shared_array
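# Illustrative usage sketch (comments only, not executed; the array name `data` is
# hypothetical). createShared makes a copy whose buffer lives in shared memory, so
# forked worker processes can read it without pickling the full array:
#   data = np.random.rand(10000, 3)
#   shared_data = createShared(data)      # backed by a multiprocessing.Array
#   shared_data[0, 0] = 42.               # visible to workers forked afterwards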
# this is to allow multiprocessing pools to operate on class methods:
# https://gist.github.com/bnyeggen/1086393
def _pickle_method(method):
    # bound methods expose im_* attributes in python 2 but __func__/__self__ in python 3
    func_name = getattr(method, 'im_func', getattr(method, '__func__', None)).__name__
    obj = getattr(method, 'im_self', None) or getattr(method, '__self__', None)
    cls = getattr(method, 'im_class', None) or type(obj)
if func_name.startswith('__') and not func_name.endswith('__'): #deal with mangled names
cls_name = cls.__name__.lstrip('_')
func_name = '_' + cls_name + func_name
return _unpickle_method, (func_name, obj, cls)
def _unpickle_method(func_name, obj, cls):
for cls in cls.__mro__:
try:
func = cls.__dict__[func_name]
except KeyError:
pass
else:
break
return func.__get__(obj, cls)
import types
# python 2 -> 3 adjustments
try:
import copy_reg
except ImportError:
import copyreg as copy_reg
copy_reg.pickle(types.MethodType, _pickle_method, _unpickle_method)
try:
xrange
except NameError:
xrange = range
# Blatant copy from <NAME>'s esutil
# https://github.com/esheldon/esutil/blob/master/esutil/numpy_util.py
def match1d(arr1input, arr2input, presorted=False):
"""
NAME:
match
CALLING SEQUENCE:
ind1,ind2 = match(arr1, arr2, presorted=False)
PURPOSE:
Match two numpy arrays. Return the indices of the matches or empty
arrays if no matches are found. This means arr1[ind1] == arr2[ind2] is
true for all corresponding pairs. arr1 must contain only unique
inputs, but arr2 may be non-unique.
If you know arr1 is sorted, set presorted=True and it will run
even faster
METHOD:
uses searchsorted with some sugar. Much faster than old version
based on IDL code.
REVISION HISTORY:
Created 2015, <NAME>, SLAC.
"""
# make sure 1D
arr1 = np.array(arr1input, ndmin=1, copy=False)
arr2 = np.array(arr2input, ndmin=1, copy=False)
# check for integer data...
if (not issubclass(arr1.dtype.type,np.integer) or
not issubclass(arr2.dtype.type,np.integer)) :
mess="Error: only works with integer types, got %s %s"
mess = mess % (arr1.dtype.type,arr2.dtype.type)
raise ValueError(mess)
if (arr1.size == 0) or (arr2.size == 0) :
mess="Error: arr1 and arr2 must each be non-zero length"
raise ValueError(mess)
# make sure that arr1 has unique values...
test=np.unique(arr1)
if test.size != arr1.size:
raise ValueError("Error: the arr1input must be unique")
# sort arr1 if not presorted
if not presorted:
st1 = np.argsort(arr1)
else:
st1 = None
# search the sorted array
sub1=np.searchsorted(arr1,arr2,sorter=st1)
# check for out-of-bounds at the high end if necessary
if (arr2.max() > arr1.max()) :
bad,=np.where(sub1 == arr1.size)
sub1[bad] = arr1.size-1
if not presorted:
sub2,=np.where(arr1[st1[sub1]] == arr2)
sub1=st1[sub1[sub2]]
else:
sub2,=np.where(arr1[sub1] == arr2)
sub1=sub1[sub2]
return sub1,sub2
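# Illustrative sketch (comments only, not executed): match1d returns index pairs such
# that arr1[ind1] == arr2[ind2]; arr1 must be unique, arr2 may contain repeats.
#   arr1 = np.array([10, 20, 30, 40])
#   arr2 = np.array([40, 20, 20, 99])
#   ind1, ind2 = match1d(arr1, arr2)
#   # arr1[ind1] -> [40, 20, 20] and arr2[ind2] -> [40, 20, 20]; the 99 is unmatched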
def logsum(logX, axis=0):
"""Computes log of the sum along give axis from the log of the summands.
This method tries hard to avoid over- or underflow.
See appendix A of Bovy, <NAME> (2009).
Args:
logX: numpy array of logarithmic summands
axis (int): axis to sum over
Returns:
log of the sum, shortened by one axis
Throws:
ValueError if logX has length 0 along given axis
"""
floatinfo = np.finfo(logX.dtype)
underflow = np.log(floatinfo.tiny) - logX.min(axis=axis)
overflow = np.log(floatinfo.max) - logX.max(axis=axis) - np.log(logX.shape[axis])
c = np.where(underflow < overflow, underflow, overflow)
# adjust the shape of c for addition with logX
c_shape = [slice(None) for i in xrange(len(logX.shape))]
c_shape[axis] = None
return np.log(np.exp(logX + c[tuple(c_shape)]).sum(axis=axis)) - c
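# Illustrative sketch (comments only, not executed): logsum is a log-sum-exp that
# avoids the underflow a naive np.log(np.exp(logX).sum()) would produce.
#   logX = np.array([-1000., -1001., -1002.])
#   logsum(logX)                  # finite, approx -999.59
#   np.log(np.exp(logX).sum())    # naive version underflows to -inf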
def chi2_cutoff(D, cutoff=3.):
"""D-dimensional eqiuvalent of "n sigma" cut.
Evaluates the quantile function of the chi-squared distribution to determine
the limit for the chi^2 of samples wrt to GMM so that they satisfy the
68-95-99.7 percent rule of the 1D Normal distribution.
Args:
D (int): dimensions of the feature space
cutoff (float): 1D equivalent cut [in units of sigma]
Returns:
float: upper limit for chi-squared in D dimensions
"""
cdf_1d = scipy.stats.norm.cdf(cutoff)
confidence_1d = 1-(1-cdf_1d)*2
cutoff_nd = scipy.stats.chi2.ppf(confidence_1d, D)
return cutoff_nd
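# Illustrative sketch (comments only, not executed): the chi^2 limit that plays the
# role of an "n sigma" cut grows with dimension.
#   chi2_cutoff(1)             # 9.0  (= 3**2 in 1D)
#   chi2_cutoff(2)             # approx 11.8
#   chi2_cutoff(3, cutoff=1.)  # approx 3.5, i.e. a "1 sigma" cut in 3D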
def covar_callback_default(coords, default=None):
N,D = coords.shape
if default.shape != (D,D):
raise RuntimeError("covar_callback received improper default covariance %r" % default)
# no need to copy since a single covariance matrix is sufficient
# return np.tile(default, (N,1,1))
return default
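# Illustrative sketch (comments only, not executed; an assumption about intended use):
# fit() expects covar_callback to be a function of the sample coordinates alone, so the
# homoscedastic default is typically bound with functools.partial before being passed in.
#   import functools
#   dispersion = 0.1
#   covar_cb = functools.partial(covar_callback_default, default=dispersion**2 * np.eye(2))
#   # then e.g.: fit(gmm, data, sel_callback=..., covar_callback=covar_cb)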
class GMM(object):
"""Gaussian mixture model with K components in D dimensions.
Attributes:
amp: numpy array (K,), component amplitudes
mean: numpy array (K,D), component means
covar: numpy array (K,D,D), component covariances
"""
def __init__(self, K=0, D=0):
"""Create the arrays for amp, mean, covar."""
self.amp = np.zeros((K))
self.mean = np.empty((K,D))
self.covar = np.empty((K,D,D))
@property
def K(self):
"""int: number of components, depends on size of amp."""
return self.amp.size
@property
def D(self):
"""int: dimensions of the feature space."""
return self.mean.shape[1]
def save(self, filename, **kwargs):
"""Save GMM to file.
Args:
filename (str): name for saved file, should end on .npz as the default
of numpy.savez(), which is called here
kwargs: dictionary of additional information to be stored in file.
Returns:
None
"""
np.savez(filename, amp=self.amp, mean=self.mean, covar=self.covar, **kwargs)
def load(self, filename):
"""Load GMM from file.
Additional arguments stored by save() will be ignored.
Args:
            filename (str): name for file created with save().
Returns:
None
"""
F = np.load(filename)
self.amp = F["amp"]
self.mean = F["mean"]
self.covar = F["covar"]
F.close()
@staticmethod
def from_file(filename):
"""Load GMM from file.
Additional arguments stored by save() will be ignored.
Args:
            filename (str): name for file created with save().
Returns:
GMM
"""
gmm = GMM()
gmm.load(filename)
return gmm
def draw(self, size=1, rng=np.random):
"""Draw samples from the GMM.
Args:
size (int): number of samples to draw
rng: numpy.random.RandomState for deterministic draw
Returns:
numpy array (size,D)
"""
# draw indices for components given amplitudes, need to make sure: sum=1
ind = rng.choice(self.K, size=size, p=self.amp/self.amp.sum())
N = np.bincount(ind, minlength=self.K)
# for each component: draw as many points as in ind from a normal
samples = np.empty((size, self.D))
lower = 0
for k in np.flatnonzero(N):
upper = lower + N[k]
samples[lower:upper, :] = rng.multivariate_normal(self.mean[k], self.covar[k], size=N[k])
lower = upper
return samples
def __call__(self, coords, covar=None, as_log=False):
"""Evaluate model PDF at given coordinates.
see logL() for details.
Args:
coords: numpy array (D,) or (N, D) of test coordinates
covar: numpy array (D, D) or (N, D, D) covariance matrix of coords
as_log (bool): return log(p) instead p
Returns:
numpy array (1,) or (N, 1) of PDF (or its log)
"""
if as_log:
return self.logL(coords, covar=covar)
else:
return np.exp(self.logL(coords, covar=covar))
def _mp_chunksize(self):
# find how many components to distribute over available threads
cpu_count = multiprocessing.cpu_count()
chunksize = max(1, self.K//cpu_count)
n_chunks = min(cpu_count, self.K//chunksize)
return n_chunks, chunksize
def _get_chunks(self):
        # split all components into roughly equal-sized chunks
n_chunks, chunksize = self._mp_chunksize()
left = self.K - n_chunks*chunksize
chunks = []
n = 0
for i in xrange(n_chunks):
n_ = n + chunksize
if left > i:
n_ += 1
chunks.append((n, n_))
n = n_
return chunks
def logL(self, coords, covar=None):
"""Log-likelihood of coords given all (i.e. the sum of) GMM components
Distributes computation over all threads on the machine.
If covar is None, this method returns
log(sum_k(p(x | k)))
of the data values x. If covar is set, the method returns
log(sum_k(p(y | k))),
where y = x + noise and noise ~ N(0, covar).
Args:
coords: numpy array (D,) or (N, D) of test coordinates
covar: numpy array (D, D) or (N, D, D) covariance matrix of coords
Returns:
numpy array (1,) or (N, 1) log(L), depending on shape of data
"""
        # Instead of holding log p(x | k) for every k at once (which is huge),
# compute it in stages: first for each chunk, then sum over all chunks
pool = multiprocessing.Pool()
chunks = self._get_chunks()
results = [pool.apply_async(self._logsum_chunk, (chunk, coords, covar)) for chunk in chunks]
log_p_y_chunk = []
for r in results:
log_p_y_chunk.append(r.get())
pool.close()
pool.join()
return logsum(np.array(log_p_y_chunk)) # sum over all chunks = all k
def _logsum_chunk(self, chunk, coords, covar=None):
# helper function to reduce the memory requirement of logL
log_p_y_k = np.empty((chunk[1]-chunk[0], len(coords)))
for i in xrange(chunk[1] - chunk[0]):
k = chunk[0] + i
log_p_y_k[i,:] = self.logL_k(k, coords, covar=covar)
return logsum(log_p_y_k)
def logL_k(self, k, coords, covar=None, chi2_only=False):
"""Log-likelihood of coords given only component k.
Args:
k (int): component index
coords: numpy array (D,) or (N, D) of test coordinates
covar: numpy array (D, D) or (N, D, D) covariance matrix of coords
chi2_only (bool): only compute deltaX^T Sigma_k^-1 deltaX
Returns:
numpy array (1,) or (N, 1) log(L), depending on shape of data
"""
# compute p(x | k)
dx = coords - self.mean[k]
if covar is None:
T_k = self.covar[k]
else:
T_k = self.covar[k] + covar
chi2 = np.einsum('...i,...ij,...j', dx, np.linalg.inv(T_k), dx)
if chi2_only:
return chi2
        # prevent tiny negative determinants from messing things up
(sign, logdet) = np.linalg.slogdet(T_k)
log2piD2 = np.log(2*np.pi)*(0.5*self.D)
return np.log(self.amp[k]) - log2piD2 - sign*logdet/2 - chi2/2
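# Illustrative usage sketch for GMM (comments only, not executed): build a small
# mixture by hand, draw samples from it, and evaluate the (log-)density.
#   gmm = GMM(K=2, D=2)
#   gmm.amp[:] = [0.7, 0.3]
#   gmm.mean[:] = [[0., 0.], [3., 3.]]
#   gmm.covar[:] = np.eye(2)              # unit covariance for both components
#   samples = gmm.draw(size=1000)         # shape (1000, 2)
#   logp = gmm(samples, as_log=True)      # same as gmm.logL(samples)
#   gmm.save("mixture.npz")               # restore with GMM.from_file("mixture.npz")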
class Background(object):
"""Background object to be used in conjuction with GMM.
For a normalizable uniform distribution, a support footprint must be set.
It should be sufficiently large to explain all non-clusters samples.
Attributes:
amp (float): mixing amplitude
footprint: numpy array, (D,2) of rectangular volume
adjust_amp (bool): whether amp will be adjusted as part of the fit
amp_max (float): maximum value of amp allowed if adjust_amp=True
"""
def __init__(self, footprint, amp=0):
"""Initialize Background with a footprint.
Args:
footprint: numpy array, (D,2) of rectangular volume
Returns:
None
"""
self.amp = amp
self.footprint = footprint
self.adjust_amp = True
self.amp_max = 1
self.amp_min = 0
@property
def p(self):
"""Probability of the background model.
Returns:
float, equal to 1/volume, where volume is given by footprint.
"""
volume = np.prod(self.footprint[1] - self.footprint[0])
return 1/volume
def draw(self, size=1, rng=np.random):
"""Draw samples from uniform background.
Args:
size (int): number of samples to draw
rng: numpy.random.RandomState for deterministic draw
Returns:
numpy array (size, D)
"""
dx = self.footprint[1] - self.footprint[0]
return self.footprint[0] + dx*rng.rand(size,len(self.footprint[0]))
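# Illustrative usage sketch for Background (comments only, not executed): a uniform
# component over a rectangular footprint that can be mixed into the fit.
#   footprint = np.array([[0., 0.], [10., 10.]])   # lower and upper corner in D=2
#   bg = Background(footprint, amp=0.2)
#   bg.p                                           # 1/volume = 0.01
#   noise = bg.draw(size=500)                      # uniform samples in the footprint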
############################
# Begin of fit functions
############################
def initFromDataMinMax(gmm, data, covar=None, s=None, k=None, rng=np.random):
"""Initialization callback for uniform random component means.
Component amplitudes are set at 1/gmm.K, covariances are set to
s**2*np.eye(D), and means are distributed randomly over the range that is
covered by data.
If s is not given, it will be set such that the volume of all components
completely fills the space covered by data.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
s (float): if set, sets component variances
k (iterable): list of components to set, is None sets all components
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
if k is None:
k = slice(None)
gmm.amp[k] = 1/gmm.K
# set model to random positions with equally sized spheres within
    # volume spanned by data
min_pos = data.min(axis=0)
max_pos = data.max(axis=0)
gmm.mean[k,:] = min_pos + (max_pos-min_pos)*rng.rand(gmm.K, gmm.D)
# if s is not set: use volume filling argument:
# K spheres of radius s [having volume s^D * pi^D/2 / gamma(D/2+1)]
# should completely fill the volume spanned by data.
if s is None:
vol_data = np.prod(max_pos-min_pos)
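# solving K * pi^(D/2) * s^D / Gamma(D/2+1) = vol_data for the sphere radius s: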
s = (vol_data / gmm.K * scipy.special.gamma(gmm.D*0.5 + 1))**(1/gmm.D) / np.sqrt(np.pi)
logger.info("initializing spheres with s=%.2f in data domain" % s)
gmm.covar[k,:,:] = s**2 * np.eye(data.shape[1])
def initFromDataAtRandom(gmm, data, covar=None, s=None, k=None, rng=np.random):
"""Initialization callback for component means to follow data on scales > s.
Component amplitudes are set to 1/gmm.K, covariances are set to
s**2*np.eye(D). For each mean, a data sample is selected at random, and a
multivariate Gaussian offset is added, whose variance is given by s**2.
If s is not given, it will be set such that the volume of all components
completely fills the space covered by data.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
s (float): if set, sets component variances
k (iterable): list of components to set; if None, sets all components
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
if k is None:
k = slice(None)
k_len = gmm.K
else:
try:
k_len = len(gmm.amp[k])
except TypeError:
k_len = 1
gmm.amp[k] = 1/gmm.K
# initialize components around data points with uncertainty s
refs = rng.randint(0, len(data), size=k_len)
D = data.shape[1]
if s is None:
min_pos = data.min(axis=0)
max_pos = data.max(axis=0)
vol_data = np.prod(max_pos-min_pos)
s = (vol_data / gmm.K * scipy.special.gamma(gmm.D*0.5 + 1))**(1/gmm.D) / np.sqrt(np.pi)
logger.info("initializing spheres with s=%.2f near data points" % s)
gmm.mean[k,:] = data[refs] + rng.multivariate_normal(np.zeros(D), s**2 * np.eye(D), size=k_len)
gmm.covar[k,:,:] = s**2 * np.eye(data.shape[1])
def initFromKMeans(gmm, data, covar=None, rng=np.random):
"""Initialization callback from a k-means clustering run.
See Algorithm 1 from Bloemer & Bujna (arXiv:1312.5946)
NOTE: The results of this call are not deterministic even if rng is set
because scipy.cluster.vq.kmeans2 uses its own initialization.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
from scipy.cluster.vq import kmeans2
center, label = kmeans2(data, gmm.K)
for k in xrange(gmm.K):
mask = (label == k)
gmm.amp[k] = mask.sum() / len(data)
gmm.mean[k,:] = data[mask].mean(axis=0)
d_m = data[mask] - gmm.mean[k]
# funny way of saying: for each point i, do the outer product
# of d_m with its transpose and sum over i
gmm.covar[k,:,:] = (d_m[:, :, None] * d_m[:, None, :]).sum(axis=0) / len(data)
def fit(gmm, data, covar=None, R=None, init_method='random', w=0., cutoff=None, sel_callback=None, oversampling=10, covar_callback=None, background=None, tol=1e-3, miniter=1, maxiter=1000, frozen=None, split_n_merge=False, rng=np.random):
"""Fit GMM to data.
If given, init_callback is called to set up the GMM components. Then, the
EM sequence is repeated until the mean log-likelihood converges within tol.
Args:
gmm: an instance of GMM
data: numpy array (N,D)
covar: sample noise covariance; numpy array (N,D,D) or (D,D) if i.i.d.
R: sample projection matrix; numpy array (N,D,D)
init_method (string): one of ['random', 'minmax', 'kmeans', 'none']
defines the method to initialize the GMM components
w (float): minimum covariance regularization
cutoff (float): size of component neighborhood [in 1D equivalent sigmas]
sel_callback: completeness callback to generate imputation samples.
oversampling (int): number of imputation samples per data sample.
only used if sel_callback is set.
value of 1 is fine but results are noisy. Set as high as feasible.
covar_callback: covariance callback for imputation samples.
needs to be present if sel_callback and covar are set.
background: an instance of Background if simultaneous fitting is desired
tol (float): tolerance for convergence of mean log-likelihood
miniter (int): minimum number of iterations of EM
maxiter (int): maximum number of iterations of EM
frozen (iterable or dict): index list of components that are not updated
split_n_merge (int): number of split & merge attempts
rng: numpy.random.RandomState for deterministic behavior
Notes:
If frozen is a simple list, it will be assumed that it applies to mean
and covariance of the specified components. It can also be a dictionary
with the keys "mean" and "covar" to specify them separately.
In either case, amplitudes will be updated to reflect any changes made.
If frozen["amp"] is set, it will use this list instead.
Returns:
mean log-likelihood (float), component neighborhoods (list of ints)
Throws:
RuntimeError for inconsistent argument combinations
"""
N = len(data)
# if there are data (features) missing, i.e. masked as np.nan, set them to zeros
# and create/set covariance elements to a very large value to reduce their weight
# to effectively zero
missing = np.isnan(data)
if missing.any():
data_ = createShared(data.copy())
data_[missing] = 0 # value does not matter as long as it's not nan
if covar is None:
covar = np.zeros((gmm.D, gmm.D))
# need to create covar_callback if imputation is requested
if sel_callback is not None:
from functools import partial
covar_callback = partial(covar_callback_default, default=np.zeros((gmm.D, gmm.D)))
if covar.shape == (gmm.D, gmm.D):
covar_ = createShared(np.tile(covar, (N,1,1)))
else:
covar_ = createShared(covar.copy())
large = 1e10
for d in range(gmm.D):
covar_[missing[:,d],d,d] += large
else:
data_ = createShared(data.copy())
if covar is None or covar.shape == (gmm.D, gmm.D):
covar_ = covar
else:
covar_ = createShared(covar.copy())
# init components
if init_method.lower() not in ['random', 'minmax', 'kmeans', 'none']:
raise NotImplementedError("init_mehod %s not in ['random', 'minmax', 'kmeans', 'none']" % init_method)
if init_method.lower() == 'random':
initFromDataAtRandom(gmm, data_, covar=covar_, rng=rng)
if init_method.lower() == 'minmax':
initFromDataMinMax(gmm, data_, covar=covar_, rng=rng)
if init_method.lower() == 'kmeans':
initFromKMeans(gmm, data_, covar=covar_, rng=rng)
# test if callbacks are consistent
if sel_callback is not None and covar is not None and covar_callback is None:
raise NotImplementedError("covar is set, but covar_callback is None: imputation samples inconsistent")
# set up pool
pool = multiprocessing.Pool()
n_chunks, chunksize = gmm._mp_chunksize()
# containers
# precautions for cases when some points are treated as outliers
# and not considered as belonging to any component
log_S = createShared(np.zeros(N)) # S = sum_k p(x|k)
log_p = [[] for k in xrange(gmm.K)] # P = p(x|k) for x in U[k]
T_inv = [None for k in xrange(gmm.K)] # T = covar(x) + gmm.covar[k]
U = [None for k in xrange(gmm.K)] # U = {x close to k}
p_bg = None
if background is not None:
gmm.amp *= 1 - background.amp # GMM amp + BG amp = 1
p_bg = [None] # p_bg = p(x|BG), no log because values are larger
if covar is not None:
# check if covar is diagonal and issue warning if not
mess = "background model will only consider diagonal elements of covar"
nondiag = ~np.eye(gmm.D, dtype='bool')
if covar.shape == (gmm.D, gmm.D):
if (covar[nondiag] != 0).any():
logger.warning(mess)
else:
if (covar[np.tile(nondiag,(N,1,1))] != 0).any():
logger.warning(mess)
# check if all component parameters can be changed
changeable = {"amp": slice(None), "mean": slice(None), "covar": slice(None)}
if frozen is not None:
if all(isinstance(item, int) for item in frozen):
changeable['amp'] = changeable['mean'] = changeable['covar'] = np.in1d(xrange(gmm.K), frozen, assume_unique=True, invert=True)
elif hasattr(frozen, 'keys') and np.in1d(["amp","mean","covar"], tuple(frozen.keys()), assume_unique=True).any():
if "amp" in frozen.keys():
changeable['amp'] = np.in1d(xrange(gmm.K), frozen['amp'], assume_unique=True, invert=True)
if "mean" in frozen.keys():
changeable['mean'] = np.in1d(xrange(gmm.K), frozen['mean'], assume_unique=True, invert=True)
if "covar" in frozen.keys():
changeable['covar'] = np.in1d(xrange(gmm.K), frozen['covar'], assume_unique=True, invert=True)
else:
raise NotImplementedError("frozen should be list of indices or dictionary with keys in ['amp','mean','covar']")
try:
log_L, N, N2 = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, changeable=changeable, miniter=miniter, maxiter=maxiter, tol=tol, rng=rng)
except Exception:
# cleanup
pool.close()
pool.join()
del data_, covar_, log_S
raise
# should we try to improve by split'n'merge of components?
# if so, keep backup copy
gmm_ = None
if frozen is not None and split_n_merge:
logger.warning("forgoing split'n'merge because some components are frozen")
else:
while split_n_merge and gmm.K >= 3:
if gmm_ is None:
gmm_ = GMM(gmm.K, gmm.D)
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
U_ = [U[k].copy() for k in xrange(gmm.K)]
changing, cleanup = _findSNMComponents(gmm, U, log_p, log_S, N+N2, pool=pool, chunksize=chunksize)
logger.info("merging %d and %d, splitting %d" % tuple(changing))
# modify components
_update_snm(gmm, changing, U, N+N2, cleanup)
# run partial EM on changeable components
# NOTE: for a partial run, we'd only need the change to Log_S from the
# changeable components. However, the neighborhoods can change from _update_snm
# or because they move, so that operation is ill-defined.
# Thus, we'll always run a full E-step, which is pretty cheap for
# converged neighborhood.
# The M-step could in principle be run on the changeable components only,
# but there seem to be side effects in what I've tried.
# Similar to the E-step, the imputation step needs to be run on all
# components, otherwise the contribution of the changeable ones to the mixture
# would be over-estimated.
# Effectively, partial runs are as expensive as full runs.
changeable['amp'] = changeable['mean'] = changeable['covar'] = np.in1d(xrange(gmm.K), changing, assume_unique=True)
log_L_, N_, N2_ = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, maxiter=maxiter, tol=tol, prefix="SNM_P", changeable=changeable, rng=rng)
changeable['amp'] = changeable['mean'] = changeable['covar'] = slice(None)
log_L_, N_, N2_ = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, maxiter=maxiter, tol=tol, prefix="SNM_F", changeable=changeable, rng=rng)
if log_L >= log_L_:
# revert to backup
gmm.amp[:] = gmm_.amp[:]
gmm.mean[:] = gmm_.mean[:,:]
gmm.covar[:,:,:] = gmm_.covar[:,:,:]
U = U_
logger.info ("split'n'merge likelihood decreased: reverting to previous model")
break
log_L = log_L_
split_n_merge -= 1
pool.close()
pool.join()
del data_, covar_, log_S
return log_L, U
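# Illustrative end-to-end sketch of fit() with a selection function. Kept as a
# comment; `data` and `covar` are hypothetical caller-supplied arrays, and the box
# limits, error level, and fit settings below are made-up example values:
#
#   def sel_box(coords):                 # completeness callback: 1 inside the box, 0 outside
#       return ((coords > 0) & (coords < 10)).all(axis=1)
#
#   def covar_cb(coords):                # homoscedastic errors for the imputation samples
#       return 0.05**2 * np.eye(coords.shape[1])
#
#   gmm = GMM(K=3, D=2)
#   logL, U = fit(gmm, data, covar=covar, w=0.1, cutoff=5,
#                 sel_callback=sel_box, covar_callback=covar_cb,
#                 init_method='minmax', rng=np.random.RandomState(42))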
# run EM sequence
def _EM(gmm, log_p, U, T_inv, log_S, data, covar=None, R=None, sel_callback=None, oversampling=10, covar_callback=None, background=None, p_bg=None, w=0, pool=None, chunksize=1, cutoff=None, miniter=1, maxiter=1000, tol=1e-3, prefix="", changeable=None, rng=np.random):
# compute effective cutoff for chi2 in D dimensions
if cutoff is not None:
# note: subsequently the cutoff parameter, e.g. in _E(), refers to this:
# chi2 < cutoff,
# while in fit() it means e.g. "cut at 3 sigma".
# These differing conventions need to be documented well.
cutoff_nd = chi2_cutoff(gmm.D, cutoff=cutoff)
# store chi2 cutoff for component shifts, use 0.5 sigma
shift_cutoff = chi2_cutoff(gmm.D, cutoff=min(0.1, cutoff/2))
else:
cutoff_nd = None
shift_cutoff = chi2_cutoff(gmm.D, cutoff=0.1)
if sel_callback is not None:
omega = createShared(sel_callback(data).astype("float"))
if np.any(omega == 0):
logger.warning("Selection probability Omega = 0 for an observed sample.")
logger.warning("Selection callback likely incorrect! Bad things will happen!")
else:
omega = None
it = 0
header = "ITER\tSAMPLES"
if sel_callback is not None:
header += "\tIMPUTED\tORIG"
if background is not None:
header += "\tBG_AMP"
header += "\tLOG_L\tSTABLE"
logger.info(header)
# save backup
gmm_ = GMM(gmm.K, gmm.D)
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:,:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
N0 = len(data) # size of original (unobscured) data set (signal and background)
N2 = 0 # size of imputed signal sample
if background is not None:
bg_amp_ = background.amp
while it < maxiter: # limit loop in case of slow convergence
log_L_, N, N2_, N0_ = _EMstep(gmm, log_p, U, T_inv, log_S, N0, data, covar=covar, R=R, sel_callback=sel_callback, omega=omega, oversampling=oversampling, covar_callback=covar_callback, background=background, p_bg=p_bg , w=w, pool=pool, chunksize=chunksize, cutoff=cutoff_nd, tol=tol, changeable=changeable, it=it, rng=rng)
# check if component has moved by more than sigma/2
shift2 = np.einsum('...i,...ij,...j', gmm.mean - gmm_.mean, np.linalg.inv(gmm_.covar), gmm.mean - gmm_.mean)
moved = np.flatnonzero(shift2 > shift_cutoff)
status_mess = "%s%d\t%d" % (prefix, it, N)
if sel_callback is not None:
status_mess += "\t%.2f\t%.2f" % (N2_, N0_)
if background is not None:
status_mess += "\t%.3f" % bg_amp_
status_mess += "\t%.3f\t%d" % (log_L_, gmm.K - moved.size)
logger.info(status_mess)
# convergence tests
if it > miniter:
if sel_callback is None:
if np.abs(log_L_ - log_L) < tol * np.abs(log_L) and moved.size == 0:
log_L = log_L_
logger.info("likelihood converged within relative tolerance %r: stopping here." % tol)
break
else:
if np.abs(N0_ - N0) < tol * N0 and np.abs(N2_ - N2) < tol * N2 and moved.size == 0:
log_L = log_L_
logger.info("imputation sample size converged within relative tolerance %r: stopping here." % tol)
break
# force update to U for all moved components
if cutoff is not None:
for k in moved:
U[k] = None
if moved.size:
logger.debug("resetting neighborhoods of moving components: (" + ("%d," * moved.size + ")") % tuple(moved))
# update all important _ quantities for convergence test(s)
log_L = log_L_
N0 = N0_
N2 = N2_
# backup to see if components move or if next step gets worse
# note: not gmm = gmm_ !
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:,:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
if background is not None:
bg_amp_ = background.amp
it += 1
return log_L, N, N2
# run one EM step
def _EMstep(gmm, log_p, U, T_inv, log_S, N0, data, covar=None, R=None, sel_callback=None, omega=None, oversampling=10, covar_callback=None, background=None, p_bg=None, w=0, pool=None, chunksize=1, cutoff=None, tol=1e-3, changeable=None, it=0, rng=np.random):
# NOTE: T_inv (in fact (T_ik)^-1 for all samples i and components k)
# is very large and is unfortunately duplicated in the parallelized _Mstep.
# If memory is too limited, one can recompute T_inv in _Msums() instead.
log_L = _Estep(gmm, log_p, U, T_inv, log_S, data, covar=covar, R=R, omega=omega, background=background, p_bg=p_bg, pool=pool, chunksize=chunksize, cutoff=cutoff, it=it)
A,M,C,N,B = _Mstep(gmm, U, log_p, T_inv, log_S, data, covar=covar, R=R, p_bg=p_bg, pool=pool, chunksize=chunksize)
A2 = M2 = C2 = B2 = N2 = 0
# here the magic happens: imputation from the current model
if sel_callback is not None:
# if there are projections / missing data, we don't know how to
# generate those for the imputation samples
# NOTE: in principle, if there are only missing data, i.e. R is 1_D,
# we could ignore missingness for data2 because we'll do an analytic
# marginalization. This doesn't work if R is a non-trivial matrix.
if R is not None:
raise NotImplementedError("R is not None: imputation samples likely inconsistent")
# create fake data with same mechanism as the original data,
# but invert selection to get the missing part
data2, covar2, N0, omega2 = draw(gmm, len(data)*oversampling, sel_callback=sel_callback, orig_size=N0*oversampling, invert_sel=True, covar_callback=covar_callback, background=background, rng=rng)
data2 = createShared(data2)
if not(covar2 is None or covar2.shape == (gmm.D, gmm.D)):
covar2 = createShared(covar2)
N0 = N0/oversampling
U2 = [None for k in xrange(gmm.K)]
if len(data2) > 0:
log_S2 = np.zeros(len(data2))
log_p2 = [[] for k in xrange(gmm.K)]
T2_inv = [None for k in xrange(gmm.K)]
R2 = None
if background is not None:
p_bg2 = [None]
else:
p_bg2 = None
log_L2 = _Estep(gmm, log_p2, U2, T2_inv, log_S2, data2, covar=covar2, R=R2, omega=None, background=background, p_bg=p_bg2, pool=pool, chunksize=chunksize, cutoff=cutoff, it=it)
A2,M2,C2,N2,B2 = _Mstep(gmm, U2, log_p2, T2_inv, log_S2, data2, covar=covar2, R=R2, p_bg=p_bg2, pool=pool, chunksize=chunksize)
# normalize for oversampling
A2 /= oversampling
M2 /= oversampling
C2 /= oversampling
B2 /= oversampling
N2 = N2/oversampling # need floating point precision in update
# check if components have outside selection
sel_outside = A2 > tol * A
if sel_outside.any():
logger.debug("component inside fractions: " + ("(" + "%.2f," * gmm.K + ")") % tuple(A/(A+A2)))
# correct the observed likelihood for the overall normalization constant
# of the data process with selection:
# logL(x | gmm) = sum_k p_k(x) / Z(gmm), with Z(gmm) = int dx sum_k p_k(x) = 1
# becomes
# logL(x | gmm) = sum_k Omega(x) p_k(x) / Z'(gmm),
# with Z'(gmm) = int dx Omega(x) sum_k p_k(x), which we can get by MC integration
log_L -= N * np.log((omega.sum() + omega2.sum() / oversampling) / (N + N2))
_update(gmm, A, M, C, N, B, A2, M2, C2, N2, B2, w, changeable=changeable, background=background)
return log_L, N, N2, N0
# perform E step calculations.
# If cutoff is set, this will also set the neighborhoods U
def _Estep(gmm, log_p, U, T_inv, log_S, data, covar=None, R=None, omega=None, background=None, p_bg=None, pool=None, chunksize=1, cutoff=None, it=0, rng=np.random):
# compute p(i | k) for each k independently in the pool
# need S = sum_k p(i | k) for further calculation
log_S[:] = 0
# H = {i | i in neighborhood[k]} for any k, needed for outliers below
# TODO: Use only when cutoff is set
H = np.zeros(len(data), dtype="bool")
k = 0
for log_p[k], U[k], T_inv[k] in \
parmap.starmap(_Esum, zip(xrange(gmm.K), U), gmm, data, covar, R, cutoff, pool=pool, chunksize=chunksize):
log_S[U[k]] += np.exp(log_p[k]) # actually S, not logS
H[U[k]] = 1
k += 1
if background is not None:
p_bg[0] = background.amp * background.p
if covar is not None:
# This is the zeroth moment of a truncated Normal error distribution
# Its calculation is simple only if the covariance is diagonal!
# See e.g. Manjunath & Wilhelm (2012) if not
error = np.ones(len(data))
x0,x1 = background.footprint
for d in range(gmm.D):
if covar.shape == (gmm.D, gmm.D): # one-for-all
denom = np.sqrt(2 * covar[d,d])
else:
denom = np.sqrt(2 * covar[:,d,d])
# CAUTION: The erf is approximate and returns 0
# Thus, we don't add the logs but multiply the value itself
# underrun is not a big problem here
error *= np.real(scipy.special.erf((data[:,d] - x0[d])/denom) - scipy.special.erf((data[:,d] - x1[d])/denom)) / 2
p_bg[0] *= error
log_S[:] = np.log(log_S + p_bg[0])
if omega is not None:
log_S += np.log(omega)
log_L = log_S.sum()
else:
# need log(S), but since log(0) isn't a good idea, need to restrict to H
log_S[H] = np.log(log_S[H])
if omega is not None:
log_S += np.log(omega)
log_L = log_S[H].sum()
return log_L
# compute chi^2, and apply selections on component neighborhood based on chi^2
def _Esum(k, U_k, gmm, data, covar=None, R=None, cutoff=None):
# since U_k could be None, need explicit reshape
d_ = data[U_k].reshape(-1, gmm.D)
if covar is not None:
if covar.shape == (gmm.D, gmm.D): # one-for-all
covar_ = covar
else: # each datum has covariance
covar_ = covar[U_k].reshape(-1, gmm.D, gmm.D)
else:
covar_ = 0
if R is not None:
R_ = R[U_k].reshape(-1, gmm.D, gmm.D)
# p(x | k) for all x in the vicinity of k
# determine all points within cutoff sigma from mean[k]
if R is None:
dx = d_ - gmm.mean[k]
else:
dx = d_ - np.dot(R_, gmm.mean[k])
if covar is None and R is None:
T_inv_k = None
chi2 = np.einsum('...i,...ij,...j', dx, np.linalg.inv(gmm.covar[k]), dx)
else:
# with data errors: need to create and return T_ik = covar_i + C_k
# and weight each datum appropriately
if R is None:
T_inv_k = np.linalg.inv(gmm.covar[k] + covar_)
else: # need to project out missing elements: T_ik = R_i C_k R_i^T + covar_i
T_inv_k = np.linalg.inv(np.einsum('...ij,jk,...lk', R_, gmm.covar[k], R_) + covar_)
chi2 = np.einsum('...i,...ij,...j', dx, T_inv_k, dx)
# NOTE: close to convergence, we could stop applying the cutoff because
# changes to U will be minimal
if cutoff is not None:
indices = chi2 < cutoff
chi2 = chi2[indices]
if (covar is not None and covar.shape != (gmm.D, gmm.D)) or R is not None:
T_inv_k = T_inv_k[indices]
if U_k is None:
U_k = np.flatnonzero(indices)
else:
U_k = U_k[indices]
# prevent tiny negative determinants from messing things up
if covar is None:
(sign, logdet) = np.linalg.slogdet(gmm.covar[k])
else:
(sign, logdet) = np.linalg.slogdet(T_inv_k)
sign *= -1 # since det(T^-1) = 1/det(T)
log2piD2 = np.log(2*np.pi)*(0.5*gmm.D)
return np.log(gmm.amp[k]) - log2piD2 - sign*logdet/2 - chi2/2, U_k, T_inv_k
# get zeroth, first, second moments of the data weighted with p_k(x), averaged over x
def _Mstep(gmm, U, log_p, T_inv, log_S, data, covar=None, R=None, p_bg=None, pool=None, chunksize=1):
# save the M sums from observed data
A = np.empty(gmm.K) # sum for amplitudes
M = np.empty((gmm.K, gmm.D)) # ... means
C = np.empty((gmm.K, gmm.D, gmm.D)) # ... covariances
N = len(data)
# perform sums for M step in the pool
# NOTE: in a partial run, could work on changeable components only;
# however, there seem to be side effects or race conditions
k = 0
for A[k], M[k,:], C[k,:,:] in \
parmap.starmap(_Msums, zip(xrange(gmm.K), U, log_p, T_inv), gmm, data, R, log_S, pool=pool, chunksize=chunksize):
k += 1
if p_bg is not None:
q_bg = p_bg[0] / np.exp(log_S)
B = q_bg.sum() # equivalent to A_k in _Msums, but done without logs
else:
B = 0
return A,M,C,N,B
# compute moments for the Mstep
def _Msums(k, U_k, log_p_k, T_inv_k, gmm, data, R, log_S):
if log_p_k.size == 0:
return 0,0,0
# get log_q_ik by dividing with S = sum_k p_ik
# NOTE: this modifies log_p_k in place, but is only relevant
# within this method since the call is parallel and its arguments
# therefore don't get updated across components.
# NOTE: reshape needed when U_k is None because of its
# implicit meaning as np.newaxis
log_p_k -= log_S[U_k].reshape(log_p_k.size)
d = data[U_k].reshape((log_p_k.size, gmm.D))
if R is not None:
R_ = R[U_k].reshape((log_p_k.size, gmm.D, gmm.D))
# amplitude: A_k = sum_i q_ik
A_k = np.exp(logsum(log_p_k))
# in fact: q_ik, but we treat sample index i silently everywhere
q_k = np.exp(log_p_k)
if R is None:
d_m = d - gmm.mean[k]
else:
d_m = d - np.dot(R_, gmm.mean[k])
# data with errors?
if T_inv_k is None and R is None:
# mean: M_k = sum_i x_i q_ik
M_k = (d * q_k[:,None]).sum(axis=0)
# covariance: C_k = sum_i (x_i - mu_k)^T(x_i - mu_k) q_ik
# funny way of saying: for each point i, do the outer product
# of d_m with its transpose, multiply with pi[i], and sum over i
C_k = (q_k[:, None, None] * d_m[:, :, None] * d_m[:, None, :]).sum(axis=0)
else:
if R is None: # that means T_ik is not None
# b_ik = mu_k + C_k T_ik^-1 (x_i - mu_k)
# B_ik = C_k - C_k T_ik^-1 C_k
b_k = gmm.mean[k] + np.einsum('ij,...jk,...k', gmm.covar[k], T_inv_k, d_m)
B_k = gmm.covar[k] - np.einsum('ij,...jk,...kl', gmm.covar[k], T_inv_k, gmm.covar[k])
else:
# F_ik = C_k R_i^T T_ik^-1
F_k = np.einsum('ij,...kj,...kl', gmm.covar[k], R_, T_inv_k)
b_k = gmm.mean[k] + np.einsum('...ij,...j', F_k, d_m)
B_k = gmm.covar[k] - np.einsum('...ij,...jk,kl', F_k, R_, gmm.covar[k])
#b_k = gmm.mean[k] + np.einsum('ij,...jk,...k', gmm.covar[k], T_inv_k, d_m)
#B_k = gmm.covar[k] - np.einsum('ij,...jk,...kl', gmm.covar[k], T_inv_k, gmm.covar[k])
M_k = (b_k * q_k[:,None]).sum(axis=0)
b_k -= gmm.mean[k]
C_k = (q_k[:, None, None] * (b_k[:, :, None] * b_k[:, None, :] + B_k)).sum(axis=0)
return A_k, M_k, C_k
# update component with the moment matrices.
# If changeable is set, update only those components and renormalize the amplitudes
def _update(gmm, A, M, C, N, B, A2, M2, C2, N2, B2, w, changeable=None, background=None):
# recompute background amplitude
if background is not None and background.adjust_amp:
background.amp = max(min((B + B2) / (N + N2), background.amp_max), background.amp_min)
# amp update:
# for partial update: need to update amp for any component that is changeable
if not hasattr(changeable['amp'], '__iter__'): # it's a slice(None), not a bool array
gmm.amp[changeable['amp']] = (A + A2)[changeable['amp']] / (N + N2)
else:
# Bovy eq. 31, with correction for bg.amp if needed
if background is None:
total = 1
else:
total = 1 - background.amp
gmm.amp[changeable['amp']] = (A + A2)[changeable['amp']] / (A + A2)[changeable['amp']].sum() * (total - (gmm.amp[~changeable['amp']]).sum())
# mean update:
gmm.mean[changeable['mean'],:] = (M + M2)[changeable['mean'],:]/(A + A2)[changeable['mean'],None]
# covar update:
# minimum covariance term?
if w > 0:
# we assume w to be a lower bound of the isotropic dispersion,
# C_k = w^2 I + ...
# then eq. 38 in Bovy et al. only ~works for N = 0 because of the
# prefactor 1 / (q_j + 1) = 1 / (A + 1) in our terminology
# On average, q_j = N/K, so we'll adopt that to correct.
w_eff = w**2 * ((N+N2)/gmm.K + 1)
gmm.covar[changeable['covar'],:,:] = (C + C2 + w_eff*np.eye(gmm.D)[None,:,:])[changeable['covar'],:,:] / (A + A2 + 1)[changeable['covar'],None,None]
else:
gmm.covar[changeable['covar'],:,:] = (C + C2)[changeable['covar'],:,:] / (A + A2)[changeable['covar'],None,None]
# draw from the model (+ background) and apply appropriate covariances
def _drawGMM_BG(gmm, size, covar_callback=None, background=None, rng=np.random):
# draw sample from model, or from background+model
if background is None:
data2 = gmm.draw(int(np.round(size)), rng=rng)
else:
# model is GMM + Background
bg_size = int(background.amp * size)
data2 = np.concatenate((gmm.draw(int(np.round(size-bg_size)), rng=rng), background.draw(int(np.round(bg_size)), rng=rng)))
# add noise
# NOTE: When background is set, adding noise is problematic if
# scattering them out is more likely than in.
# This can be avoided when the background footprint is large compared to
# selection region
if covar_callback is not None:
covar2 = covar_callback(data2)
if covar2.shape == (gmm.D, gmm.D): # one-for-all
noise = rng.multivariate_normal(np.zeros(gmm.D), covar2, size=len(data2))
else:
# create noise from unit covariance and then dot with eigenvalue
# decomposition of covar2 to get the right noise distribution:
# n' = R V^1/2 n, where covar = R V R^-1
# faster than drawing one sample per each covariance
noise = rng.multivariate_normal(np.zeros(gmm.D), np.eye(gmm.D), size=len(data2))
val, rot = np.linalg.eigh(covar2)
val = np.maximum(val,0) # to prevent univariate errors from underflowing
noise = np.einsum('...ij,...j', rot, np.sqrt(val)*noise)
data2 += noise
else:
covar2 = None
return data2, covar2
def draw(gmm, obs_size, sel_callback=None, invert_sel=False, orig_size=None, covar_callback=None, background=None, rng=np.random):
"""Draw from the GMM (and the Background) with noise and selection.
Draws orig_size samples from the GMM and the Background, if set; calls
covar_callback if set and applies the resulting covariances; then calls
sel_callback on the (noisy) samples and returns the matching ones.
If the number of resulting samples is inconsistent with obs_size, i.e.
outside of the 68 percent confidence limit of a Poisson draw, it will
update its estimate for the original sample size orig_size.
An estimate can be provided with orig_size, otherwise it will use obs_size.
Note:
If sel_callback is set, the number of returned samples is not
necessarily given by obs_size.
Args:
gmm: an instance of GMM
obs_size (int): number of observed samples
sel_callback: completeness callback to generate imputation samples.
invert_sel (bool): whether to invert the result of sel_callback
orig_size (int): an estimate of the original size of the sample.
background: an instance of Background
covar_callback: covariance callback for imputation samples.
rng: numpy.random.RandomState for deterministic behavior
Returns:
sample: numpy array (N_orig, D)
covar_sample: numpy array (N_orig, D, D) or None if covar_callback=None
N_orig (int): updated estimate of orig_size if sel_callback is set
Throws:
RuntimeError for inconsistent argument combinations
"""
if orig_size is None:
orig_size = int(obs_size)
# draw from model (with background) and add noise.
# TODO: may want to decide whether to add noise before selection or after
# Here we do noise, then selection, but this is not fundamental
data, covar = _drawGMM_BG(gmm, orig_size, covar_callback=covar_callback, background=background, rng=rng)
# apply selection
if sel_callback is not None:
omega = sel_callback(data)
sel = rng.rand(len(data)) < omega
# check if predicted observed size is consistent with observed data
# 68% confidence interval for Poisson variate: observed size
alpha = 0.32
lower = 0.5*scipy.stats.chi2.ppf(alpha/2, 2*obs_size)
upper = 0.5*scipy.stats.chi2.ppf(1 - alpha/2, 2*obs_size + 2)
obs_size_ = sel.sum()
while obs_size_ > upper or obs_size_ < lower:
orig_size = int(orig_size / obs_size_ * obs_size)
data, covar = _drawGMM_BG(gmm, orig_size, covar_callback=covar_callback, background=background, rng=rng)
omega = sel_callback(data)
sel = rng.rand(len(data)) < omega
obs_size_ = sel.sum()
if invert_sel:
sel = ~sel
data = data[sel]
omega = omega[sel]
if covar_callback is not None and covar.shape != (gmm.D, gmm.D):
covar = covar[sel]
return data, covar, orig_size, omega
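# Illustrative sketch of drawing mock observations from a fitted model. Kept as a
# comment; it reuses the hypothetical sel_box and covar_cb callbacks sketched above:
#
#   obs, obs_covar, orig_size, omega = draw(gmm, obs_size=1000,
#                                           sel_callback=sel_box,
#                                           covar_callback=covar_cb,
#                                           rng=np.random.RandomState(0))
#   # obs holds noisy samples that passed the selection; orig_size estimates how many
#   # samples had to be drawn from the full model to produce them.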
def _JS(k, gmm, log_p, log_S, U, A):
# compute Kullback-Leibler divergence
log_q_k = log_p[k] - log_S[U[k]]
return np.dot(np.exp(log_q_k), log_q_k - np.log(A[k]) - log_p[k] + np.log(gmm.amp[k])) / A[k]
def _findSNMComponents(gmm, U, log_p, log_S, N, pool=None, chunksize=1):
# find those components that are most similar
JM = np.zeros((gmm.K, gmm.K))
# compute log_q (posterior for k given i), but use normalized probabilities
# to allow for merging of empty components
log_q = [log_p[k] - log_S[U[k]] - np.log(gmm.amp[k]) for k in xrange(gmm.K)]
for k in xrange(gmm.K):
# don't need diagonal (can merge), and JM is symmetric
for j in xrange(k+1, gmm.K):
# get index list for intersection of U of k and l
# FIXME: match1d fails if either U is empty
# SOLUTION: merge empty U, split another
i_k, i_j = match1d(U[k], U[j], presorted=True)
JM[k,j] = np.dot(np.exp(log_q[k][i_k]), np.exp(log_q[j][i_j]))
merge_jk = np.unravel_index(JM.argmax(), JM.shape)
# if all Us are disjoint, JM is blank and merge_jk = [0,0]
# merge two smallest components and clean up from the bottom
cleanup = False
if merge_jk[0] == 0 and merge_jk[1] == 0:
logger.debug("neighborhoods disjunct. merging components %d and %d" % tuple(merge_jk))
merge_jk = np.argsort(gmm.amp)[:2]
cleanup = True
# split the one whose p(x|k) deviates most from the current Gaussian
# ask for the three worst components to avoid split being in merge_jk
"""
JS = np.empty(gmm.K)
k = 0
A = gmm.amp * N
for JS[k] in \
parmap.map(_JS, xrange(gmm.K), gmm, log_p, log_S, U, A, pool=pool, chunksize=chunksize):
k += 1
"""
# get largest Eigenvalue, weighed by amplitude
# Large EV implies extended object, which often is caused by covering
# multiple clusters. This happens also for almost empty components, which
# should rather be merged than split, hence amplitude weights.
# TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered
EV = np.linalg.svd(gmm.covar, compute_uv=False)
JS = EV[:,0] * gmm.amp
split_l3 = np.argsort(JS)[-3:][::-1]
# check that the three indices are unique
changing = np.array([merge_jk[0], merge_jk[1], split_l3[0]])
if split_l3[0] in merge_jk:
if split_l3[1] not in merge_jk:
changing[2] = split_l3[1]
else:
changing[2] = split_l3[2]
return changing, cleanup
def _update_snm(gmm, changeable, U, N, cleanup):
# reconstruct A from gmm.amp
A = gmm.amp * N
# update parameters and U
# merge 0 and 1, store in 0, Bovy eq. 39
gmm.amp[changeable[0]] = gmm.amp[changeable[0:2]].sum()
if not cleanup:
gmm.mean[changeable[0]] = np.sum(gmm.mean[changeable[0:2]] * A[changeable[0:2]][:,None], axis=0) / A[changeable[0:2]].sum()
gmm.covar[changeable[0]] = np.sum(gmm.covar[changeable[0:2]] * A[changeable[0:2]][:,None,None], axis=0) / A[changeable[0:2]].sum()
U[changeable[0]] = np.union1d(U[changeable[0]], U[changeable[1]])
else:
# if we're cleaning up the weakest components:
# merging does not lead to valid component parameters as the original
# ones can be anywhere. Simply adopt second one.
gmm.mean[changeable[0],:] = gmm.mean[changeable[1],:]
gmm.covar[changeable[0],:,:] = gmm.covar[changeable[1],:,:]
U[changeable[0]] = U[changeable[1]]
# split 2, store in 1 and 2
# following SVD method in Zhang 2003, with alpha=1/2, u = 1/4
gmm.amp[changeable[1]] = gmm.amp[changeable[2]] = gmm.amp[changeable[2]] / 2
# TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered
_, radius2, rotation = np.linalg.svd(gmm.covar[changeable[2]])
dl = np.sqrt(radius2[0]) * rotation[0] / 4
gmm.mean[changeable[1]] = gmm.mean[changeable[2]] - dl
gmm.mean[changeable[2]] = gmm.mean[changeable[2]] + dl
gmm.covar[changeable[1:]] = np.linalg.det(gmm.covar[changeable[2]])**(1/gmm.D) * np.eye(gmm.D)
U[changeable[1]] = U[changeable[2]].copy() # now 1 and 2 have same U
# L-fold cross-validation of the fit function.
# all parameters for fit must be supplied with kwargs.
# the rng seed will be fixed for the CV runs so that all random effects are the
# same for each run.
def cv_fit(gmm, data, L=10, **kwargs):
N = len(data)
lcv = np.empty(N)
logger.info("running %d-fold cross-validation ..." % L)
# CV and stacking can't have probabilistic inits that depend on
# data or subsets thereof
init_callback = kwargs.get("init_callback", None)
if init_callback is not None:
raise RuntimeError("Cross-validation can only be used consistently with init_callback=None")
# make sure we know what the RNG is,
# fix state of RNG to make behavior of fit reproducible
rng = kwargs.get("rng", np.random)
rng_state = rng.get_state()
# need to copy the gmm when init_cb is None
# otherwise runs start from different init positions
gmm0 = GMM(K=gmm.K, D=gmm.D)
gmm0.amp[:,] = gmm.amp[:]
gmm0.mean[:,:] = gmm.mean[:,:]
gmm0.covar[:,:,:] = gmm.covar[:,:,:]
# same for bg if present
bg = kwargs.get("background", None)
if bg is not None:
bg_amp0 = bg.amp
# to do L-fold CV here, need to split covar too if set
covar = kwargs.pop("covar", None)
for i in xrange(L):
rng.set_state(rng_state)
mask = np.arange(N) % L == i
if covar is None or covar.shape == (gmm.D, gmm.D):
fit(gmm, data[~mask], covar=covar, **kwargs)
lcv[mask] = gmm.logL(data[mask], covar=covar)
else:
fit(gmm, data[~mask], covar=covar[~mask], **kwargs)
lcv[mask] = gmm.logL(data[mask], covar=covar[mask])
# undo for consistency
gmm.amp[:,] = gmm0.amp[:]
gmm.mean[:,:] = gmm0.mean[:,:]
gmm.covar[:,:,:] = gmm0.covar[:,:,:]
if bg is not None:
bg.amp = bg_amp0
return lcv
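# Illustrative sketch of L-fold cross-validation. Kept as a comment; `data` is a
# hypothetical caller-supplied array and the fit settings are made-up example values:
#
#   lcv = cv_fit(gmm, data, L=10, w=0.1, init_method='minmax',
#                rng=np.random.RandomState(1))
#   print("mean held-out log-likelihood: %.3f" % lcv.mean())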
def stack(gmms, weights):
# build stacked model by combining all gmms and applying weights to amps
stacked = GMM(K=0, D=gmms[0].D)
for m in xrange(len(gmms)):
stacked.amp = np.concatenate((stacked.amp[:], weights[m]*gmms[m].amp[:]))
stacked.mean = np.concatenate((stacked.mean[:,:], gmms[m].mean[:,:]))
stacked.covar = np.concatenate((stacked.covar[:,:,:], gmms[m].covar[:,:,:]))
stacked.amp /= stacked.amp.sum()
return stacked
def stack_fit(gmms, data, kwargs, L=10, tol=1e-5, rng=np.random):
M = len(gmms)
N = len(data)
lcvs = np.empty((M,N))
for m in xrange(M):
# run CV to get cross-validation likelihood
rng_state = rng.get_state()
lcvs[m,:] = cv_fit(gmms[m], data, L=L, **(kwargs[m]))
rng.set_state(rng_state)
# run normal fit on all data
fit(gmms[m], data, **(kwargs[m]))
# determine the weights that maximize the stacked estimator likelihood
# run a tiny EM on lcvs to get them
beta = np.ones(M)/M
log_p_k = np.empty_like(lcvs)
log_S = np.empty(N)
it = 0
logger.info("optimizing stacking weights\n")
logger.info("ITER\tLOG_L")
while it < 20:
log_p_k[:,:] = lcvs + np.log(beta)[:,None]
log_S[:] = logsum(log_p_k)
log_p_k[:,:] -= log_S
beta[:] = np.exp(logsum(log_p_k, axis=1)) / N
logL_ = log_S.mean()
logger.info("STACK%d\t%.4f" % (it, logL_))
if it > 0 and logL_ - logL < tol:
break
logL = logL_
it += 1
return stack(gmms, beta)
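# Illustrative sketch of stacking several candidate models. Kept as a comment;
# `data` is a hypothetical caller-supplied array and the component counts and fit
# settings are made-up example values:
#
#   gmms = [GMM(K=k, D=2) for k in (2, 3, 4)]
#   kwargs = [dict(w=0.1, init_method='minmax') for _ in gmms]
#   stacked = stack_fit(gmms, data, kwargs, L=10, rng=np.random.RandomState(2))
#   # stacked is a single GMM whose components are the union of all fitted models,
#   # with amplitudes reweighted by the cross-validated stacking weights.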
chunks = self._get_chunks()
results = [pool.apply_async(self._logsum_chunk, (chunk, coords, covar)) for chunk in chunks]
log_p_y_chunk = []
for r in results:
log_p_y_chunk.append(r.get())
pool.close()
pool.join()
return logsum(np.array(log_p_y_chunk)) # sum over all chunks = all k
def _logsum_chunk(self, chunk, coords, covar=None):
# helper function to reduce the memory requirement of logL
log_p_y_k = np.empty((chunk[1]-chunk[0], len(coords)))
for i in xrange(chunk[1] - chunk[0]):
k = chunk[0] + i
log_p_y_k[i,:] = self.logL_k(k, coords, covar=covar)
return logsum(log_p_y_k)
def logL_k(self, k, coords, covar=None, chi2_only=False):
"""Log-likelihood of coords given only component k.
Args:
k (int): component index
coords: numpy array (D,) or (N, D) of test coordinates
covar: numpy array (D, D) or (N, D, D) covariance matrix of coords
chi2_only (bool): only compute deltaX^T Sigma_k^-1 deltaX
Returns:
numpy array (1,) or (N, 1) log(L), depending on shape of data
"""
# compute p(x | k)
dx = coords - self.mean[k]
if covar is None:
T_k = self.covar[k]
else:
T_k = self.covar[k] + covar
chi2 = np.einsum('...i,...ij,...j', dx, np.linalg.inv(T_k), dx)
if chi2_only:
return chi2
# prevent tiny negative determinants to mess up
(sign, logdet) = np.linalg.slogdet(T_k)
log2piD2 = np.log(2*np.pi)*(0.5*self.D)
return np.log(self.amp[k]) - log2piD2 - sign*logdet/2 - chi2/2
class Background(object):
"""Background object to be used in conjuction with GMM.
For a normalizable uniform distribution, a support footprint must be set.
It should be sufficiently large to explain all non-clusters samples.
Attributes:
amp (float): mixing amplitude
footprint: numpy array, (D,2) of rectangular volume
adjust_amp (bool): whether amp will be adjusted as part of the fit
amp_max (float): maximum value of amp allowed if adjust_amp=True
"""
def __init__(self, footprint, amp=0):
"""Initialize Background with a footprint.
Args:
footprint: numpy array, (D,2) of rectangular volume
Returns:
None
"""
self.amp = amp
self.footprint = footprint
self.adjust_amp = True
self.amp_max = 1
self.amp_min = 0
@property
def p(self):
"""Probability of the background model.
Returns:
float, equal to 1/volume, where volume is given by footprint.
"""
volume = np.prod(self.footprint[1] - self.footprint[0])
return 1/volume
def draw(self, size=1, rng=np.random):
"""Draw samples from uniform background.
Args:
size (int): number of samples to draw
rng: numpy.random.RandomState for deterministic draw
Returns:
numpy array (size, D)
"""
dx = self.footprint[1] - self.footprint[0]
return self.footprint[0] + dx*rng.rand(size,len(self.footprint[0]))
############################
# Begin of fit functions
############################
def initFromDataMinMax(gmm, data, covar=None, s=None, k=None, rng=np.random):
"""Initialization callback for uniform random component means.
Component amplitudes are set at 1/gmm.K, covariances are set to
s**2*np.eye(D), and means are distributed randomly over the range that is
covered by data.
If s is not given, it will be set such that the volume of all components
completely fills the space covered by data.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
s (float): if set, sets component variances
k (iterable): list of components to set, is None sets all components
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
if k is None:
k = slice(None)
gmm.amp[k] = 1/gmm.K
# set model to random positions with equally sized spheres within
# volumne spanned by data
min_pos = data.min(axis=0)
max_pos = data.max(axis=0)
gmm.mean[k,:] = min_pos + (max_pos-min_pos)*rng.rand(gmm.K, gmm.D)
# if s is not set: use volume filling argument:
# K spheres of radius s [having volume s^D * pi^D/2 / gamma(D/2+1)]
# should completely fill the volume spanned by data.
if s is None:
vol_data = np.prod(max_pos-min_pos)
s = (vol_data / gmm.K * scipy.special.gamma(gmm.D*0.5 + 1))**(1/gmm.D) / np.sqrt(np.pi)
logger.info("initializing spheres with s=%.2f in data domain" % s)
gmm.covar[k,:,:] = s**2 * np.eye(data.shape[1])
def initFromDataAtRandom(gmm, data, covar=None, s=None, k=None, rng=np.random):
"""Initialization callback for component means to follow data on scales > s.
Component amplitudes are set to 1/gmm.K, covariances are set to
s**2*np.eye(D). For each mean, a data sample is selected at random, and a
multivariant Gaussian offset is added, whose variance is given by s**2.
If s is not given, it will be set such that the volume of all components
completely fills the space covered by data.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
s (float): if set, sets component variances
k (iterable): list of components to set, is None sets all components
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
if k is None:
k = slice(None)
k_len = gmm.K
else:
try:
k_len = len(gmm.amp[k])
except TypeError:
k_len = 1
gmm.amp[k] = 1/gmm.K
# initialize components around data points with uncertainty s
refs = rng.randint(0, len(data), size=k_len)
D = data.shape[1]
if s is None:
min_pos = data.min(axis=0)
max_pos = data.max(axis=0)
vol_data = np.prod(max_pos-min_pos)
s = (vol_data / gmm.K * scipy.special.gamma(gmm.D*0.5 + 1))**(1/gmm.D) / np.sqrt(np.pi)
logger.info("initializing spheres with s=%.2f near data points" % s)
gmm.mean[k,:] = data[refs] + rng.multivariate_normal(np.zeros(D), s**2 * np.eye(D), size=k_len)
gmm.covar[k,:,:] = s**2 * np.eye(data.shape[1])
def initFromKMeans(gmm, data, covar=None, rng=np.random):
"""Initialization callback from a k-means clustering run.
See Algorithm 1 from Bloemer & Bujna (arXiv:1312.5946)
NOTE: The result of this call are not deterministic even if rng is set
because scipy.cluster.vq.kmeans2 uses its own initialization.
Args:
gmm: A GMM to be initialized
data: numpy array (N,D) to define the range of the component means
covar: ignored in this callback
rng: numpy.random.RandomState for deterministic behavior
Returns:
None
"""
from scipy.cluster.vq import kmeans2
center, label = kmeans2(data, gmm.K)
for k in xrange(gmm.K):
mask = (label == k)
gmm.amp[k] = mask.sum() / len(data)
gmm.mean[k,:] = data[mask].mean(axis=0)
d_m = data[mask] - gmm.mean[k]
# funny way of saying: for each point i, do the outer product
# of d_m with its transpose and sum over i
gmm.covar[k,:,:] = (d_m[:, :, None] * d_m[:, None, :]).sum(axis=0) / len(data)
def fit(gmm, data, covar=None, R=None, init_method='random', w=0., cutoff=None, sel_callback=None, oversampling=10, covar_callback=None, background=None, tol=1e-3, miniter=1, maxiter=1000, frozen=None, split_n_merge=False, rng=np.random):
"""Fit GMM to data.
If given, init_callback is called to set up the GMM components. Then, the
EM sequence is repeated until the mean log-likelihood converges within tol.
Args:
gmm: an instance if GMM
data: numpy array (N,D)
covar: sample noise covariance; numpy array (N,D,D) or (D,D) if i.i.d.
R: sample projection matrix; numpy array (N,D,D)
init_method (string): one of ['random', 'minmax', 'kmeans', 'none']
defines the method to initialize the GMM components
w (float): minimum covariance regularization
cutoff (float): size of component neighborhood [in 1D equivalent sigmas]
sel_callback: completeness callback to generate imputation samples.
oversampling (int): number of imputation samples per data sample.
only used if sel_callback is set.
value of 1 is fine but results are noisy. Set as high as feasible.
covar_callback: covariance callback for imputation samples.
needs to be present if sel_callback and covar are set.
background: an instance of Background if simultaneous fitting is desired
tol (float): tolerance for covergence of mean log-likelihood
maxiter (int): maximum number of iterations of EM
frozen (iterable or dict): index list of components that are not updated
split_n_merge (int): number of split & merge attempts
rng: numpy.random.RandomState for deterministic behavior
Notes:
If frozen is a simple list, it will be assumed that is applies to mean
and covariance of the specified components. It can also be a dictionary
with the keys "mean" and "covar" to specify them separately.
In either case, amplitudes will be updated to reflect any changes made.
If frozen["amp"] is set, it will use this list instead.
Returns:
mean log-likelihood (float), component neighborhoods (list of ints)
Throws:
RuntimeError for inconsistent argument combinations
"""
N = len(data)
# if there are data (features) missing, i.e. masked as np.nan, set them to zeros
# and create/set covariance elements to very large value to reduce its weight
# to effectively zero
missing = np.isnan(data)
if missing.any():
data_ = createShared(data.copy())
data_[missing] = 0 # value does not matter as long as it's not nan
if covar is None:
covar = np.zeros((gmm.D, gmm.D))
# need to create covar_callback if imputation is requested
if sel_callback is not None:
from functools import partial
covar_callback = partial(covar_callback_default, default=np.zeros((gmm.D, gmm.D)))
if covar.shape == (gmm.D, gmm.D):
covar_ = createShared(np.tile(covar, (N,1,1)))
else:
covar_ = createShared(covar.copy())
large = 1e10
for d in range(gmm.D):
covar_[missing[:,d],d,d] += large
covar_[missing[:,d],d,d] += large
else:
data_ = createShared(data.copy())
if covar is None or covar.shape == (gmm.D, gmm.D):
covar_ = covar
else:
covar_ = createShared(covar.copy())
# init components
if init_method.lower() not in ['random', 'minmax', 'kmeans', 'none']:
raise NotImplementedError("init_mehod %s not in ['random', 'minmax', 'kmeans', 'none']" % init_method)
if init_method.lower() == 'random':
initFromDataAtRandom(gmm, data_, covar=covar_, rng=rng)
if init_method.lower() == 'minmax':
initFromDataMinMax(gmm, data_, covar=covar_, rng=rng)
if init_method.lower() == 'kmeans':
initFromKMeans(gmm, data_, covar=covar_, rng=rng)
# test if callbacks are consistent
if sel_callback is not None and covar is not None and covar_callback is None:
raise NotImplementedError("covar is set, but covar_callback is None: imputation samples inconsistent")
# set up pool
pool = multiprocessing.Pool()
n_chunks, chunksize = gmm._mp_chunksize()
# containers
# precautions for cases when some points are treated as outliers
# and not considered as belonging to any component
log_S = createShared(np.zeros(N)) # S = sum_k p(x|k)
log_p = [[] for k in xrange(gmm.K)] # P = p(x|k) for x in U[k]
T_inv = [None for k in xrange(gmm.K)] # T = covar(x) + gmm.covar[k]
U = [None for k in xrange(gmm.K)] # U = {x close to k}
p_bg = None
if background is not None:
gmm.amp *= 1 - background.amp # GMM amp + BG amp = 1
p_bg = [None] # p_bg = p(x|BG), no log because values are larger
if covar is not None:
# check if covar is diagonal and issue warning if not
mess = "background model will only consider diagonal elements of covar"
nondiag = ~np.eye(gmm.D, dtype='bool')
if covar.shape == (gmm.D, gmm.D):
if (covar[nondiag] != 0).any():
logger.warning(mess)
else:
if (covar[np.tile(nondiag,(N,1,1))] != 0).any():
logger.warning(mess)
# check if all component parameters can be changed
changeable = {"amp": slice(None), "mean": slice(None), "covar": slice(None)}
if frozen is not None:
if all(isinstance(item, int) for item in frozen):
changeable['amp'] = changeable['mean'] = changeable['covar'] = np.in1d(xrange(gmm.K), frozen, assume_unique=True, invert=True)
elif hasattr(frozen, 'keys') and np.in1d(["amp","mean","covar"], tuple(frozen.keys()), assume_unique=True).any():
if "amp" in frozen.keys():
changeable['amp'] = np.in1d(xrange(gmm.K), frozen['amp'], assume_unique=True, invert=True)
if "mean" in frozen.keys():
changeable['mean'] = np.in1d(xrange(gmm.K), frozen['mean'], assume_unique=True, invert=True)
if "covar" in frozen.keys():
changeable['covar'] = np.in1d(xrange(gmm.K), frozen['covar'], assume_unique=True, invert=True)
else:
raise NotImplementedError("frozen should be list of indices or dictionary with keys in ['amp','mean','covar']")
try:
log_L, N, N2 = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, changeable=changeable, miniter=miniter, maxiter=maxiter, tol=tol, rng=rng)
except Exception:
# cleanup
pool.close()
pool.join()
del data_, covar_, log_S
raise
# should we try to improve by split'n'merge of components?
# if so, keep backup copy
gmm_ = None
if frozen is not None and split_n_merge:
logger.warning("forgoing split'n'merge because some components are frozen")
else:
while split_n_merge and gmm.K >= 3:
if gmm_ is None:
gmm_ = GMM(gmm.K, gmm.D)
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
U_ = [U[k].copy() for k in xrange(gmm.K)]
changing, cleanup = _findSNMComponents(gmm, U, log_p, log_S, N+N2, pool=pool, chunksize=chunksize)
logger.info("merging %d and %d, splitting %d" % tuple(changing))
# modify components
_update_snm(gmm, changing, U, N+N2, cleanup)
# run partial EM on changeable components
            # NOTE: for a partial run, we'd only need the change to log_S from the
            # changeable components. However, the neighborhoods can change from _update_snm
# or because they move, so that operation is ill-defined.
# Thus, we'll always run a full E-step, which is pretty cheap for
# converged neighborhood.
# The M-step could in principle be run on the changeable components only,
# but there seem to be side effects in what I've tried.
# Similar to the E-step, the imputation step needs to be run on all
# components, otherwise the contribution of the changeable ones to the mixture
# would be over-estimated.
# Effectively, partial runs are as expensive as full runs.
changeable['amp'] = changeable['mean'] = changeable['covar'] = np.in1d(xrange(gmm.K), changing, assume_unique=True)
log_L_, N_, N2_ = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, maxiter=maxiter, tol=tol, prefix="SNM_P", changeable=changeable, rng=rng)
changeable['amp'] = changeable['mean'] = changeable['covar'] = slice(None)
log_L_, N_, N2_ = _EM(gmm, log_p, U, T_inv, log_S, data_, covar=covar_, R=R, sel_callback=sel_callback, oversampling=oversampling, covar_callback=covar_callback, w=w, pool=pool, chunksize=chunksize, cutoff=cutoff, background=background, p_bg=p_bg, maxiter=maxiter, tol=tol, prefix="SNM_F", changeable=changeable, rng=rng)
if log_L >= log_L_:
# revert to backup
gmm.amp[:] = gmm_.amp[:]
gmm.mean[:] = gmm_.mean[:,:]
gmm.covar[:,:,:] = gmm_.covar[:,:,:]
U = U_
                logger.info("split'n'merge likelihood decreased: reverting to previous model")
break
log_L = log_L_
split_n_merge -= 1
pool.close()
pool.join()
del data_, covar_, log_S
return log_L, U
# run EM sequence
def _EM(gmm, log_p, U, T_inv, log_S, data, covar=None, R=None, sel_callback=None, oversampling=10, covar_callback=None, background=None, p_bg=None, w=0, pool=None, chunksize=1, cutoff=None, miniter=1, maxiter=1000, tol=1e-3, prefix="", changeable=None, rng=np.random):
# compute effective cutoff for chi2 in D dimensions
if cutoff is not None:
# note: subsequently the cutoff parameter, e.g. in _E(), refers to this:
# chi2 < cutoff,
# while in fit() it means e.g. "cut at 3 sigma".
# These differing conventions need to be documented well.
cutoff_nd = chi2_cutoff(gmm.D, cutoff=cutoff)
# store chi2 cutoff for component shifts, use 0.5 sigma
shift_cutoff = chi2_cutoff(gmm.D, cutoff=min(0.1, cutoff/2))
else:
cutoff_nd = None
shift_cutoff = chi2_cutoff(gmm.D, cutoff=0.1)
if sel_callback is not None:
omega = createShared(sel_callback(data).astype("float"))
if np.any(omega == 0):
logger.warning("Selection probability Omega = 0 for an observed sample.")
logger.warning("Selection callback likely incorrect! Bad things will happen!")
else:
omega = None
it = 0
header = "ITER\tSAMPLES"
if sel_callback is not None:
header += "\tIMPUTED\tORIG"
if background is not None:
header += "\tBG_AMP"
header += "\tLOG_L\tSTABLE"
logger.info(header)
# save backup
gmm_ = GMM(gmm.K, gmm.D)
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:,:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
N0 = len(data) # size of original (unobscured) data set (signal and background)
N2 = 0 # size of imputed signal sample
if background is not None:
bg_amp_ = background.amp
while it < maxiter: # limit loop in case of slow convergence
log_L_, N, N2_, N0_ = _EMstep(gmm, log_p, U, T_inv, log_S, N0, data, covar=covar, R=R, sel_callback=sel_callback, omega=omega, oversampling=oversampling, covar_callback=covar_callback, background=background, p_bg=p_bg , w=w, pool=pool, chunksize=chunksize, cutoff=cutoff_nd, tol=tol, changeable=changeable, it=it, rng=rng)
# check if component has moved by more than sigma/2
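        # shift2_k = (mu_k - mu_k_old)^T Sigma_k_old^-1 (mu_k - mu_k_old), i.e. the squared
        # Mahalanobis distance of each mean update under the previous covariance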
shift2 = np.einsum('...i,...ij,...j', gmm.mean - gmm_.mean, np.linalg.inv(gmm_.covar), gmm.mean - gmm_.mean)
moved = np.flatnonzero(shift2 > shift_cutoff)
status_mess = "%s%d\t%d" % (prefix, it, N)
if sel_callback is not None:
status_mess += "\t%.2f\t%.2f" % (N2_, N0_)
if background is not None:
status_mess += "\t%.3f" % bg_amp_
status_mess += "\t%.3f\t%d" % (log_L_, gmm.K - moved.size)
logger.info(status_mess)
# convergence tests
if it > miniter:
if sel_callback is None:
if np.abs(log_L_ - log_L) < tol * np.abs(log_L) and moved.size == 0:
log_L = log_L_
logger.info("likelihood converged within relative tolerance %r: stopping here." % tol)
break
else:
if np.abs(N0_ - N0) < tol * N0 and np.abs(N2_ - N2) < tol * N2 and moved.size == 0:
log_L = log_L_
logger.info("imputation sample size converged within relative tolerance %r: stopping here." % tol)
break
# force update to U for all moved components
if cutoff is not None:
for k in moved:
U[k] = None
if moved.size:
logger.debug("resetting neighborhoods of moving components: (" + ("%d," * moved.size + ")") % tuple(moved))
# update all important _ quantities for convergence test(s)
log_L = log_L_
N0 = N0_
N2 = N2_
# backup to see if components move or if next step gets worse
# note: not gmm = gmm_ !
gmm_.amp[:] = gmm.amp[:]
gmm_.mean[:,:] = gmm.mean[:,:]
gmm_.covar[:,:,:] = gmm.covar[:,:,:]
if background is not None:
bg_amp_ = background.amp
it += 1
return log_L, N, N2
# run one EM step
def _EMstep(gmm, log_p, U, T_inv, log_S, N0, data, covar=None, R=None, sel_callback=None, omega=None, oversampling=10, covar_callback=None, background=None, p_bg=None, w=0, pool=None, chunksize=1, cutoff=None, tol=1e-3, changeable=None, it=0, rng=np.random):
# NOTE: T_inv (in fact (T_ik)^-1 for all samples i and components k)
# is very large and is unfortunately duplicated in the parallelized _Mstep.
# If memory is too limited, one can recompute T_inv in _Msums() instead.
log_L = _Estep(gmm, log_p, U, T_inv, log_S, data, covar=covar, R=R, omega=omega, background=background, p_bg=p_bg, pool=pool, chunksize=chunksize, cutoff=cutoff, it=it)
A,M,C,N,B = _Mstep(gmm, U, log_p, T_inv, log_S, data, covar=covar, R=R, p_bg=p_bg, pool=pool, chunksize=chunksize)
A2 = M2 = C2 = B2 = N2 = 0
# here the magic happens: imputation from the current model
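    # draw samples from the current model in the unobserved region (inverted selection),
    # run E/M on them, and add their moments (A2, M2, C2) to the observed ones below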
if sel_callback is not None:
# if there are projections / missing data, we don't know how to
# generate those for the imputation samples
# NOTE: in principle, if there are only missing data, i.e. R is 1_D,
# we could ignore missingness for data2 because we'll do an analytic
# marginalization. This doesn't work if R is a non-trivial matrix.
if R is not None:
raise NotImplementedError("R is not None: imputation samples likely inconsistent")
# create fake data with same mechanism as the original data,
# but invert selection to get the missing part
data2, covar2, N0, omega2 = draw(gmm, len(data)*oversampling, sel_callback=sel_callback, orig_size=N0*oversampling, invert_sel=True, covar_callback=covar_callback, background=background, rng=rng)
data2 = createShared(data2)
if not(covar2 is None or covar2.shape == (gmm.D, gmm.D)):
covar2 = createShared(covar2)
N0 = N0/oversampling
U2 = [None for k in xrange(gmm.K)]
if len(data2) > 0:
log_S2 = np.zeros(len(data2))
log_p2 = [[] for k in xrange(gmm.K)]
T2_inv = [None for k in xrange(gmm.K)]
R2 = None
if background is not None:
p_bg2 = [None]
else:
p_bg2 = None
log_L2 = _Estep(gmm, log_p2, U2, T2_inv, log_S2, data2, covar=covar2, R=R2, omega=None, background=background, p_bg=p_bg2, pool=pool, chunksize=chunksize, cutoff=cutoff, it=it)
A2,M2,C2,N2,B2 = _Mstep(gmm, U2, log_p2, T2_inv, log_S2, data2, covar=covar2, R=R2, p_bg=p_bg2, pool=pool, chunksize=chunksize)
# normalize for oversampling
A2 /= oversampling
M2 /= oversampling
C2 /= oversampling
B2 /= oversampling
N2 = N2/oversampling # need floating point precision in update
# check if components have outside selection
sel_outside = A2 > tol * A
if sel_outside.any():
logger.debug("component inside fractions: " + ("(" + "%.2f," * gmm.K + ")") % tuple(A/(A+A2)))
            # correct the observed likelihood for the overall normalization constant
            # of the data process with selection:
            # logL(x | gmm) = sum_k p_k(x) / Z(gmm), with Z(gmm) = int dx sum_k p_k(x) = 1
            # becomes
            # logL(x | gmm) = sum_k Omega(x) p_k(x) / Z'(gmm),
            # with Z'(gmm) = int dx Omega(x) sum_k p_k(x), which we can get by MC integration
log_L -= N * np.log((omega.sum() + omega2.sum() / oversampling) / (N + N2))
_update(gmm, A, M, C, N, B, A2, M2, C2, N2, B2, w, changeable=changeable, background=background)
return log_L, N, N2, N0
# perform E step calculations.
# If cutoff is set, this will also set the neighborhoods U
def _Estep(gmm, log_p, U, T_inv, log_S, data, covar=None, R=None, omega=None, background=None, p_bg=None, pool=None, chunksize=1, cutoff=None, it=0, rng=np.random):
# compute p(i | k) for each k independently in the pool
# need S = sum_k p(i | k) for further calculation
log_S[:] = 0
# H = {i | i in neighborhood[k]} for any k, needed for outliers below
# TODO: Use only when cutoff is set
H = np.zeros(len(data), dtype="bool")
k = 0
for log_p[k], U[k], T_inv[k] in \
parmap.starmap(_Esum, zip(xrange(gmm.K), U), gmm, data, covar, R, cutoff, pool=pool, chunksize=chunksize):
log_S[U[k]] += np.exp(log_p[k]) # actually S, not logS
H[U[k]] = 1
k += 1
if background is not None:
p_bg[0] = background.amp * background.p
if covar is not None:
# This is the zeroth moment of a truncated Normal error distribution
            # Its calculation is simple only if the covariance is diagonal!
# See e.g. Manjunath & Wilhem (2012) if not
error = np.ones(len(data))
x0,x1 = background.footprint
for d in range(gmm.D):
if covar.shape == (gmm.D, gmm.D): # one-for-all
denom = np.sqrt(2 * covar[d,d])
else:
denom = np.sqrt(2 * covar[:,d,d])
# CAUTION: The erf is approximate and returns 0
                # Thus, we don't add the logs but multiply the value itself
# underrun is not a big problem here
error *= np.real(scipy.special.erf((data[:,d] - x0[d])/denom) - scipy.special.erf((data[:,d] - x1[d])/denom)) / 2
p_bg[0] *= error
log_S[:] = np.log(log_S + p_bg[0])
if omega is not None:
log_S += np.log(omega)
log_L = log_S.sum()
else:
# need log(S), but since log(0) isn't a good idea, need to restrict to H
log_S[H] = np.log(log_S[H])
if omega is not None:
log_S += np.log(omega)
log_L = log_S[H].sum()
return log_L
# compute chi^2, and apply selections on component neighborhood based in chi^2
def _Esum(k, U_k, gmm, data, covar=None, R=None, cutoff=None):
# since U_k could be None, need explicit reshape
d_ = data[U_k].reshape(-1, gmm.D)
if covar is not None:
if covar.shape == (gmm.D, gmm.D): # one-for-all
covar_ = covar
else: # each datum has covariance
covar_ = covar[U_k].reshape(-1, gmm.D, gmm.D)
else:
covar_ = 0
if R is not None:
R_ = R[U_k].reshape(-1, gmm.D, gmm.D)
# p(x | k) for all x in the vicinity of k
# determine all points within cutoff sigma from mean[k]
if R is None:
dx = d_ - gmm.mean[k]
else:
dx = d_ - np.dot(R_, gmm.mean[k])
if covar is None and R is None:
T_inv_k = None
chi2 = np.einsum('...i,...ij,...j', dx, np.linalg.inv(gmm.covar[k]), dx)
else:
# with data errors: need to create and return T_ik = covar_i + C_k
# and weight each datum appropriately
if R is None:
T_inv_k = np.linalg.inv(gmm.covar[k] + covar_)
else: # need to project out missing elements: T_ik = R_i C_k R_i^R + covar_i
T_inv_k = np.linalg.inv(np.einsum('...ij,jk,...lk', R_, gmm.covar[k], R_) + covar_)
chi2 = np.einsum('...i,...ij,...j', dx, T_inv_k, dx)
# NOTE: close to convergence, we could stop applying the cutoff because
# changes to U will be minimal
if cutoff is not None:
indices = chi2 < cutoff
chi2 = chi2[indices]
if (covar is not None and covar.shape != (gmm.D, gmm.D)) or R is not None:
T_inv_k = T_inv_k[indices]
if U_k is None:
U_k = np.flatnonzero(indices)
else:
U_k = U_k[indices]
# prevent tiny negative determinants to mess up
if covar is None:
(sign, logdet) = np.linalg.slogdet(gmm.covar[k])
else:
(sign, logdet) = np.linalg.slogdet(T_inv_k)
sign *= -1 # since det(T^-1) = 1/det(T)
log2piD2 = np.log(2*np.pi)*(0.5*gmm.D)
return np.log(gmm.amp[k]) - log2piD2 - sign*logdet/2 - chi2/2, U_k, T_inv_k
# get zeroth, first, second moments of the data weighted with p_k(x) avgd over x
def _Mstep(gmm, U, log_p, T_inv, log_S, data, covar=None, R=None, p_bg=None, pool=None, chunksize=1):
# save the M sums from observed data
A = np.empty(gmm.K) # sum for amplitudes
M = np.empty((gmm.K, gmm.D)) # ... means
C = np.empty((gmm.K, gmm.D, gmm.D)) # ... covariances
N = len(data)
# perform sums for M step in the pool
# NOTE: in a partial run, could work on changeable components only;
# however, there seem to be side effects or race conditions
k = 0
for A[k], M[k,:], C[k,:,:] in \
parmap.starmap(_Msums, zip(xrange(gmm.K), U, log_p, T_inv), gmm, data, R, log_S, pool=pool, chunksize=chunksize):
k += 1
if p_bg is not None:
q_bg = p_bg[0] / np.exp(log_S)
B = q_bg.sum() # equivalent to A_k in _Msums, but done without logs
else:
B = 0
return A,M,C,N,B
# compute moments for the Mstep
def _Msums(k, U_k, log_p_k, T_inv_k, gmm, data, R, log_S):
if log_p_k.size == 0:
return 0,0,0
# get log_q_ik by dividing with S = sum_k p_ik
# NOTE: this modifies log_p_k in place, but is only relevant
# within this method since the call is parallel and its arguments
# therefore don't get updated across components.
# NOTE: reshape needed when U_k is None because of its
# implicit meaning as np.newaxis
log_p_k -= log_S[U_k].reshape(log_p_k.size)
d = data[U_k].reshape((log_p_k.size, gmm.D))
if R is not None:
R_ = R[U_k].reshape((log_p_k.size, gmm.D, gmm.D))
# amplitude: A_k = sum_i q_ik
A_k = np.exp(logsum(log_p_k))
# in fact: q_ik, but we treat sample index i silently everywhere
q_k = np.exp(log_p_k)
if R is None:
d_m = d - gmm.mean[k]
else:
d_m = d - np.dot(R_, gmm.mean[k])
# data with errors?
if T_inv_k is None and R is None:
# mean: M_k = sum_i x_i q_ik
M_k = (d * q_k[:,None]).sum(axis=0)
# covariance: C_k = sum_i (x_i - mu_k)^T(x_i - mu_k) q_ik
# funny way of saying: for each point i, do the outer product
# of d_m with its transpose, multiply with pi[i], and sum over i
C_k = (q_k[:, None, None] * d_m[:, :, None] * d_m[:, None, :]).sum(axis=0)
else:
if R is None: # that means T_ik is not None
# b_ik = mu_k + C_k T_ik^-1 (x_i - mu_k)
# B_ik = C_k - C_k T_ik^-1 C_k
b_k = gmm.mean[k] + np.einsum('ij,...jk,...k', gmm.covar[k], T_inv_k, d_m)
B_k = gmm.covar[k] - np.einsum('ij,...jk,...kl', gmm.covar[k], T_inv_k, gmm.covar[k])
else:
# F_ik = C_k R_i^T T_ik^-1
F_k = np.einsum('ij,...kj,...kl', gmm.covar[k], R_, T_inv_k)
b_k = gmm.mean[k] + np.einsum('...ij,...j', F_k, d_m)
B_k = gmm.covar[k] - np.einsum('...ij,...jk,kl', F_k, R_, gmm.covar[k])
#b_k = gmm.mean[k] + np.einsum('ij,...jk,...k', gmm.covar[k], T_inv_k, d_m)
#B_k = gmm.covar[k] - np.einsum('ij,...jk,...kl', gmm.covar[k], T_inv_k, gmm.covar[k])
M_k = (b_k * q_k[:,None]).sum(axis=0)
b_k -= gmm.mean[k]
C_k = (q_k[:, None, None] * (b_k[:, :, None] * b_k[:, None, :] + B_k)).sum(axis=0)
return A_k, M_k, C_k
# update component with the moment matrices.
# If changeable is set, update only those components and renormalize the amplitudes
def _update(gmm, A, M, C, N, B, A2, M2, C2, N2, B2, w, changeable=None, background=None):
# recompute background amplitude
if background is not None and background.adjust_amp:
background.amp = max(min((B + B2) / (N + N2), background.amp_max), background.amp_min)
# amp update:
# for partial update: need to update amp for any component that is changeable
if not hasattr(changeable['amp'], '__iter__'): # it's a slice(None), not a bool array
gmm.amp[changeable['amp']] = (A + A2)[changeable['amp']] / (N + N2)
else:
# Bovy eq. 31, with correction for bg.amp if needed
if background is None:
total = 1
else:
total = 1 - background.amp
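        # changeable amplitudes share whatever budget is left after the frozen
        # (and background) amplitudes, so that all amplitudes still sum to total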
gmm.amp[changeable['amp']] = (A + A2)[changeable['amp']] / (A + A2)[changeable['amp']].sum() * (total - (gmm.amp[~changeable['amp']]).sum())
    # mean update
gmm.mean[changeable['mean'],:] = (M + M2)[changeable['mean'],:]/(A + A2)[changeable['mean'],None]
    # covar update
# minimum covariance term?
if w > 0:
# we assume w to be a lower bound of the isotropic dispersion,
# C_k = w^2 I + ...
# then eq. 38 in Bovy et al. only ~works for N = 0 because of the
# prefactor 1 / (q_j + 1) = 1 / (A + 1) in our terminology
# On average, q_j = N/K, so we'll adopt that to correct.
w_eff = w**2 * ((N+N2)/gmm.K + 1)
gmm.covar[changeable['covar'],:,:] = (C + C2 + w_eff*np.eye(gmm.D)[None,:,:])[changeable['covar'],:,:] / (A + A2 + 1)[changeable['covar'],None,None]
else:
gmm.covar[changeable['covar'],:,:] = (C + C2)[changeable['covar'],:,:] / (A + A2)[changeable['covar'],None,None]
# draw from the model (+ background) and apply appropriate covariances
def _drawGMM_BG(gmm, size, covar_callback=None, background=None, rng=np.random):
# draw sample from model, or from background+model
if background is None:
data2 = gmm.draw(int(np.round(size)), rng=rng)
else:
# model is GMM + Background
bg_size = int(background.amp * size)
data2 = np.concatenate((gmm.draw(int(np.round(size-bg_size)), rng=rng), background.draw(int(np.round(bg_size)), rng=rng)))
# add noise
# NOTE: When background is set, adding noise is problematic if
# scattering them out is more likely than in.
# This can be avoided when the background footprint is large compared to
# selection region
if covar_callback is not None:
covar2 = covar_callback(data2)
if covar2.shape == (gmm.D, gmm.D): # one-for-all
noise = rng.multivariate_normal(np.zeros(gmm.D), covar2, size=len(data2))
else:
# create noise from unit covariance and then dot with eigenvalue
            # decomposition of covar2 to get the right noise distribution:
# n' = R V^1/2 n, where covar = R V R^-1
# faster than drawing one sample per each covariance
noise = rng.multivariate_normal(np.zeros(gmm.D), np.eye(gmm.D), size=len(data2))
val, rot = np.linalg.eigh(covar2)
            val = np.maximum(val,0) # guard against tiny negative eigenvalues from numerical noise
noise = np.einsum('...ij,...j', rot, np.sqrt(val)*noise)
data2 += noise
else:
covar2 = None
return data2, covar2
def draw(gmm, obs_size, sel_callback=None, invert_sel=False, orig_size=None, covar_callback=None, background=None, rng=np.random):
"""Draw from the GMM (and the Background) with noise and selection.
Draws orig_size samples from the GMM and the Background, if set; calls
covar_callback if set and applies resulting covariances; the calls
sel_callback on the (noisy) samples and returns those matching ones.
    If the number of resulting samples is inconsistent with obs_size, i.e.
outside of the 68 percent confidence limit of a Poisson draw, it will
update its estimate for the original sample size orig_size.
An estimate can be provided with orig_size, otherwise it will use obs_size.
Note:
If sel_callback is set, the number of returned samples is not
necessarily given by obs_size.
Args:
        gmm: an instance of GMM
obs_size (int): number of observed samples
sel_callback: completeness callback to generate imputation samples.
invert_sel (bool): whether to invert the result of sel_callback
orig_size (int): an estimate of the original size of the sample.
background: an instance of Background
covar_callback: covariance callback for imputation samples.
rng: numpy.random.RandomState for deterministic behavior
Returns:
        sample: numpy array (N_orig, D)
        covar_sample: numpy array (N_orig, D, D) or None if covar_callback=None
        N_orig (int): updated estimate of orig_size if sel_callback is set
        omega: selection probabilities of the returned samples (only set if sel_callback is given)
Throws:
RuntimeError for inconsistent argument combinations
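    Example:
        Illustrative only; gmm, sel and rng are assumed to exist in the calling scope:

            sample, covar_sample, N_orig, omega = draw(gmm, 1000, sel_callback=sel, rng=rng)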
"""
if orig_size is None:
orig_size = int(obs_size)
# draw from model (with background) and add noise.
# TODO: may want to decide whether to add noise before selection or after
# Here we do noise, then selection, but this is not fundamental
data, covar = _drawGMM_BG(gmm, orig_size, covar_callback=covar_callback, background=background, rng=rng)
    omega = None # stays None unless sel_callback provides selection probabilities
    # apply selection
if sel_callback is not None:
omega = sel_callback(data)
sel = rng.rand(len(data)) < omega
# check if predicted observed size is consistent with observed data
# 68% confidence interval for Poisson variate: observed size
alpha = 0.32
lower = 0.5*scipy.stats.chi2.ppf(alpha/2, 2*obs_size)
upper = 0.5*scipy.stats.chi2.ppf(1 - alpha/2, 2*obs_size + 2)
obs_size_ = sel.sum()
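        # rescale orig_size and redraw until the selected sample size is consistent with obs_size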
while obs_size_ > upper or obs_size_ < lower:
orig_size = int(orig_size / obs_size_ * obs_size)
data, covar = _drawGMM_BG(gmm, orig_size, covar_callback=covar_callback, background=background, rng=rng)
omega = sel_callback(data)
sel = rng.rand(len(data)) < omega
obs_size_ = sel.sum()
if invert_sel:
sel = ~sel
data = data[sel]
omega = omega[sel]
if covar_callback is not None and covar.shape != (gmm.D, gmm.D):
covar = covar[sel]
return data, covar, orig_size, omega
def _JS(k, gmm, log_p, log_S, U, A):
    # compute Kullback-Leibler divergence
log_q_k = log_p[k] - log_S[U[k]]
return np.dot(np.exp(log_q_k), log_q_k - np.log(A[k]) - log_p[k] + np.log(gmm.amp[k])) / A[k]
def _findSNMComponents(gmm, U, log_p, log_S, N, pool=None, chunksize=1):
# find those components that are most similar
JM = np.zeros((gmm.K, gmm.K))
# compute log_q (posterior for k given i), but use normalized probabilities
# to allow for merging of empty components
log_q = [log_p[k] - log_S[U[k]] - np.log(gmm.amp[k]) for k in xrange(gmm.K)]
for k in xrange(gmm.K):
# don't need diagonal (can merge), and JM is symmetric
for j in xrange(k+1, gmm.K):
            # get index list for intersection of U of k and j
# FIXME: match1d fails if either U is empty
# SOLUTION: merge empty U, split another
i_k, i_j = match1d(U[k], U[j], presorted=True)
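            # JM[k,j] = sum_i q_ik * q_ij over the shared neighborhood:
            # large when both components explain the same samples, i.e. merge candidates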
JM[k,j] = np.dot(np.exp(log_q[k][i_k]), np.exp(log_q[j][i_j]))
merge_jk = np.unravel_index(JM.argmax(), JM.shape)
# if all Us are disjunct, JM is blank and merge_jk = [0,0]
# merge two smallest components and clean up from the bottom
cleanup = False
if merge_jk[0] == 0 and merge_jk[1] == 0:
logger.debug("neighborhoods disjunct. merging components %d and %d" % tuple(merge_jk))
merge_jk = np.argsort(gmm.amp)[:2]
cleanup = True
# split the one whose p(x|k) deviate most from current Gaussian
# ask for the three worst components to avoid split being in merge_jk
"""
JS = np.empty(gmm.K)
k = 0
A = gmm.amp * N
for JS[k] in \
parmap.map(_JS, xrange(gmm.K), gmm, log_p, log_S, U, A, pool=pool, chunksize=chunksize):
k += 1
"""
# get largest Eigenvalue, weighed by amplitude
    # Large EV implies extended object, which often is caused by covering
    # multiple clusters. This happens also for almost empty components, which
# should rather be merged than split, hence amplitude weights.
# TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered
EV = np.linalg.svd(gmm.covar, compute_uv=False)
JS = EV[:,0] * gmm.amp
split_l3 = np.argsort(JS)[-3:][::-1]
# check that the three indices are unique
changing = np.array([merge_jk[0], merge_jk[1], split_l3[0]])
if split_l3[0] in merge_jk:
if split_l3[1] not in merge_jk:
changing[2] = split_l3[1]
else:
changing[2] = split_l3[2]
return changing, cleanup
def _update_snm(gmm, changeable, U, N, cleanup):
# reconstruct A from gmm.amp
A = gmm.amp * N
# update parameters and U
# merge 0 and 1, store in 0, Bovy eq. 39
gmm.amp[changeable[0]] = gmm.amp[changeable[0:2]].sum()
if not cleanup:
gmm.mean[changeable[0]] = np.sum(gmm.mean[changeable[0:2]] * A[changeable[0:2]][:,None], axis=0) / A[changeable[0:2]].sum()
gmm.covar[changeable[0]] = np.sum(gmm.covar[changeable[0:2]] * A[changeable[0:2]][:,None,None], axis=0) / A[changeable[0:2]].sum()
U[changeable[0]] = np.union1d(U[changeable[0]], U[changeable[1]])
else:
# if we're cleaning up the weakest components:
# merging does not lead to valid component parameters as the original
# ones can be anywhere. Simply adopt second one.
gmm.mean[changeable[0],:] = gmm.mean[changeable[1],:]
gmm.covar[changeable[0],:,:] = gmm.covar[changeable[1],:,:]
U[changeable[0]] = U[changeable[1]]
# split 2, store in 1 and 2
# following SVD method in Zhang 2003, with alpha=1/2, u = 1/4
gmm.amp[changeable[1]] = gmm.amp[changeable[2]] = gmm.amp[changeable[2]] / 2
# TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered
_, radius2, rotation = np.linalg.svd(gmm.covar[changeable[2]])
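    # displace the two children by sigma/4 along the major axis, in opposite directions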
dl = np.sqrt(radius2[0]) * rotation[0] / 4
gmm.mean[changeable[1]] = gmm.mean[changeable[2]] - dl
gmm.mean[changeable[2]] = gmm.mean[changeable[2]] + dl
gmm.covar[changeable[1:]] = np.linalg.det(gmm.covar[changeable[2]])**(1/gmm.D) * np.eye(gmm.D)
U[changeable[1]] = U[changeable[2]].copy() # now 1 and 2 have same U
# L-fold cross-validation of the fit function.
# all parameters for fit must be supplied with kwargs.
# the rng seed will be fixed for the CV runs so that all random effects are the
# same for each run.
def cv_fit(gmm, data, L=10, **kwargs):
N = len(data)
lcv = np.empty(N)
logger.info("running %d-fold cross-validation ..." % L)
# CV and stacking can't have probabilistic inits that depends on
# data or subsets thereof
init_callback = kwargs.get("init_callback", None)
if init_callback is not None:
raise RuntimeError("Cross-validation can only be used consistently with init_callback=None")
# make sure we know what the RNG is,
    # fix state of RNG to make behavior of fit reproducible
rng = kwargs.get("rng", np.random)
rng_state = rng.get_state()
# need to copy the gmm when init_cb is None
# otherwise runs start from different init positions
gmm0 = GMM(K=gmm.K, D=gmm.D)
gmm0.amp[:,] = gmm.amp[:]
gmm0.mean[:,:] = gmm.mean[:,:]
gmm0.covar[:,:,:] = gmm.covar[:,:,:]
# same for bg if present
bg = kwargs.get("background", None)
if bg is not None:
bg_amp0 = bg.amp
# to L-fold CV here, need to split covar too if set
covar = kwargs.pop("covar", None)
for i in xrange(L):
rng.set_state(rng_state)
mask = np.arange(N) % L == i
if covar is None or covar.shape == (gmm.D, gmm.D):
fit(gmm, data[~mask], covar=covar, **kwargs)
lcv[mask] = gmm.logL(data[mask], covar=covar)
else:
fit(gmm, data[~mask], covar=covar[~mask], **kwargs)
lcv[mask] = gmm.logL(data[mask], covar=covar[mask])
# undo for consistency
gmm.amp[:,] = gmm0.amp[:]
gmm.mean[:,:] = gmm0.mean[:,:]
gmm.covar[:,:,:] = gmm0.covar[:,:,:]
if bg is not None:
bg.amp = bg_amp0
return lcv
def stack(gmms, weights):
# build stacked model by combining all gmms and applying weights to amps
stacked = GMM(K=0, D=gmms[0].D)
for m in xrange(len(gmms)):
stacked.amp = np.concatenate((stacked.amp[:], weights[m]*gmms[m].amp[:]))
stacked.mean = np.concatenate((stacked.mean[:,:], gmms[m].mean[:,:]))
stacked.covar = np.concatenate((stacked.covar[:,:,:], gmms[m].covar[:,:,:]))
stacked.amp /= stacked.amp.sum()
return stacked
def stack_fit(gmms, data, kwargs, L=10, tol=1e-5, rng=np.random):
M = len(gmms)
N = len(data)
lcvs = np.empty((M,N))
for m in xrange(M):
# run CV to get cross-validation likelihood
rng_state = rng.get_state()
lcvs[m,:] = cv_fit(gmms[m], data, L=L, **(kwargs[m]))
rng.set_state(rng_state)
# run normal fit on all data
fit(gmms[m], data, **(kwargs[m]))
# determine the weights that maximize the stacked estimator likelihood
# run a tiny EM on lcvs to get them
beta = np.ones(M)/M
log_p_k = np.empty_like(lcvs)
log_S = np.empty(N)
it = 0
logger.info("optimizing stacking weights\n")
logger.info("ITER\tLOG_L")
    while it < 20:
log_p_k[:,:] = lcvs + np.log(beta)[:,None]
log_S[:] = logsum(log_p_k)
log_p_k[:,:] -= log_S
beta[:] = np.exp(logsum(log_p_k, axis=1)) / N
logL_ = log_S.mean()
logger.info("STACK%d\t%.4f" % (it, logL_))
if it > 0 and logL_ - logL < tol:
break
logL = logL_
it += 1
return stack(gmms, beta)
| en | 0.826652 | # set up multiprocessing Create a shared array to be used for multiprocessing's processes. Taken from http://stackoverflow.com/questions/5549190/ Works only for float, double, int, long types (e.g. no bool). Args: numpy array, arbitrary shape Returns: numpy array whose container is a multiprocessing.Array # this is to allow multiprocessing pools to operate on class methods: # https://gist.github.com/bnyeggen/1086393 #deal with mangled names # python 2 -> 3 adjustments # Blantant copy from <NAME>'s esutil # https://github.com/esheldon/esutil/blob/master/esutil/numpy_util.py NAME: match CALLING SEQUENCE: ind1,ind2 = match(arr1, arr2, presorted=False) PURPOSE: Match two numpy arrays. Return the indices of the matches or empty arrays if no matches are found. This means arr1[ind1] == arr2[ind2] is true for all corresponding pairs. arr1 must contain only unique inputs, but arr2 may be non-unique. If you know arr1 is sorted, set presorted=True and it will run even faster METHOD: uses searchsorted with some sugar. Much faster than old version based on IDL code. REVISION HISTORY: Created 2015, <NAME>, SLAC. # make sure 1D # check for integer data... # make sure that arr1 has unique values... # sort arr1 if not presorted # search the sorted array # check for out-of-bounds at the high end if necessary Computes log of the sum along give axis from the log of the summands. This method tries hard to avoid over- or underflow. See appendix A of Bovy, <NAME> (2009). Args: logX: numpy array of logarithmic summands axis (int): axis to sum over Returns: log of the sum, shortened by one axis Throws: ValueError if logX has length 0 along given axis # adjust the shape of c for addition with logX D-dimensional eqiuvalent of "n sigma" cut. Evaluates the quantile function of the chi-squared distribution to determine the limit for the chi^2 of samples wrt to GMM so that they satisfy the 68-95-99.7 percent rule of the 1D Normal distribution. Args: D (int): dimensions of the feature space cutoff (float): 1D equivalent cut [in units of sigma] Returns: float: upper limit for chi-squared in D dimensions # no need to copy since a single covariance matrix is sufficient # return np.tile(default, (N,1,1)) Gaussian mixture model with K components in D dimensions. Attributes: amp: numpy array (K,), component amplitudes mean: numpy array (K,D), component means covar: numpy array (K,D,D), component covariances Create the arrays for amp, mean, covar. int: number of components, depends on size of amp. int: dimensions of the feature space. Save GMM to file. Args: filename (str): name for saved file, should end on .npz as the default of numpy.savez(), which is called here kwargs: dictionary of additional information to be stored in file. Returns: None Load GMM from file. Additional arguments stored by save() will be ignored. Args: filename (str): name for file create with save(). Returns: None Load GMM from file. Additional arguments stored by save() will be ignored. Args: filename (str): name for file create with save(). Returns: GMM Draw samples from the GMM. Args: size (int): number of samples to draw rng: numpy.random.RandomState for deterministic draw Returns: numpy array (size,D) # draw indices for components given amplitudes, need to make sure: sum=1 # for each component: draw as many points as in ind from a normal Evaluate model PDF at given coordinates. see logL() for details. 
Args: coords: numpy array (D,) or (N, D) of test coordinates covar: numpy array (D, D) or (N, D, D) covariance matrix of coords as_log (bool): return log(p) instead p Returns: numpy array (1,) or (N, 1) of PDF (or its log) # find how many components to distribute over available threads # split all component in ideal-sized chunks Log-likelihood of coords given all (i.e. the sum of) GMM components Distributes computation over all threads on the machine. If covar is None, this method returns log(sum_k(p(x | k))) of the data values x. If covar is set, the method returns log(sum_k(p(y | k))), where y = x + noise and noise ~ N(0, covar). Args: coords: numpy array (D,) or (N, D) of test coordinates covar: numpy array (D, D) or (N, D, D) covariance matrix of coords Returns: numpy array (1,) or (N, 1) log(L), depending on shape of data # Instead log p (x | k) for each k (which is huge) # compute it in stages: first for each chunk, then sum over all chunks # sum over all chunks = all k # helper function to reduce the memory requirement of logL Log-likelihood of coords given only component k. Args: k (int): component index coords: numpy array (D,) or (N, D) of test coordinates covar: numpy array (D, D) or (N, D, D) covariance matrix of coords chi2_only (bool): only compute deltaX^T Sigma_k^-1 deltaX Returns: numpy array (1,) or (N, 1) log(L), depending on shape of data # compute p(x | k) # prevent tiny negative determinants to mess up Background object to be used in conjuction with GMM. For a normalizable uniform distribution, a support footprint must be set. It should be sufficiently large to explain all non-clusters samples. Attributes: amp (float): mixing amplitude footprint: numpy array, (D,2) of rectangular volume adjust_amp (bool): whether amp will be adjusted as part of the fit amp_max (float): maximum value of amp allowed if adjust_amp=True Initialize Background with a footprint. Args: footprint: numpy array, (D,2) of rectangular volume Returns: None Probability of the background model. Returns: float, equal to 1/volume, where volume is given by footprint. Draw samples from uniform background. Args: size (int): number of samples to draw rng: numpy.random.RandomState for deterministic draw Returns: numpy array (size, D) ############################ # Begin of fit functions ############################ Initialization callback for uniform random component means. Component amplitudes are set at 1/gmm.K, covariances are set to s**2*np.eye(D), and means are distributed randomly over the range that is covered by data. If s is not given, it will be set such that the volume of all components completely fills the space covered by data. Args: gmm: A GMM to be initialized data: numpy array (N,D) to define the range of the component means covar: ignored in this callback s (float): if set, sets component variances k (iterable): list of components to set, is None sets all components rng: numpy.random.RandomState for deterministic behavior Returns: None # set model to random positions with equally sized spheres within # volumne spanned by data # if s is not set: use volume filling argument: # K spheres of radius s [having volume s^D * pi^D/2 / gamma(D/2+1)] # should completely fill the volume spanned by data. Initialization callback for component means to follow data on scales > s. Component amplitudes are set to 1/gmm.K, covariances are set to s**2*np.eye(D). For each mean, a data sample is selected at random, and a multivariant Gaussian offset is added, whose variance is given by s**2. 
If s is not given, it will be set such that the volume of all components completely fills the space covered by data. Args: gmm: A GMM to be initialized data: numpy array (N,D) to define the range of the component means covar: ignored in this callback s (float): if set, sets component variances k (iterable): list of components to set, is None sets all components rng: numpy.random.RandomState for deterministic behavior Returns: None # initialize components around data points with uncertainty s Initialization callback from a k-means clustering run. See Algorithm 1 from Bloemer & Bujna (arXiv:1312.5946) NOTE: The result of this call are not deterministic even if rng is set because scipy.cluster.vq.kmeans2 uses its own initialization. Args: gmm: A GMM to be initialized data: numpy array (N,D) to define the range of the component means covar: ignored in this callback rng: numpy.random.RandomState for deterministic behavior Returns: None # funny way of saying: for each point i, do the outer product # of d_m with its transpose and sum over i Fit GMM to data. If given, init_callback is called to set up the GMM components. Then, the EM sequence is repeated until the mean log-likelihood converges within tol. Args: gmm: an instance if GMM data: numpy array (N,D) covar: sample noise covariance; numpy array (N,D,D) or (D,D) if i.i.d. R: sample projection matrix; numpy array (N,D,D) init_method (string): one of ['random', 'minmax', 'kmeans', 'none'] defines the method to initialize the GMM components w (float): minimum covariance regularization cutoff (float): size of component neighborhood [in 1D equivalent sigmas] sel_callback: completeness callback to generate imputation samples. oversampling (int): number of imputation samples per data sample. only used if sel_callback is set. value of 1 is fine but results are noisy. Set as high as feasible. covar_callback: covariance callback for imputation samples. needs to be present if sel_callback and covar are set. background: an instance of Background if simultaneous fitting is desired tol (float): tolerance for covergence of mean log-likelihood maxiter (int): maximum number of iterations of EM frozen (iterable or dict): index list of components that are not updated split_n_merge (int): number of split & merge attempts rng: numpy.random.RandomState for deterministic behavior Notes: If frozen is a simple list, it will be assumed that is applies to mean and covariance of the specified components. It can also be a dictionary with the keys "mean" and "covar" to specify them separately. In either case, amplitudes will be updated to reflect any changes made. If frozen["amp"] is set, it will use this list instead. Returns: mean log-likelihood (float), component neighborhoods (list of ints) Throws: RuntimeError for inconsistent argument combinations # if there are data (features) missing, i.e. 
masked as np.nan, set them to zeros # and create/set covariance elements to very large value to reduce its weight # to effectively zero # value does not matter as long as it's not nan # need to create covar_callback if imputation is requested # init components # test if callbacks are consistent # set up pool # containers # precautions for cases when some points are treated as outliers # and not considered as belonging to any component # S = sum_k p(x|k) # P = p(x|k) for x in U[k] # T = covar(x) + gmm.covar[k] # U = {x close to k} # GMM amp + BG amp = 1 # p_bg = p(x|BG), no log because values are larger # check if covar is diagonal and issue warning if not # check if all component parameters can be changed # cleanup # should we try to improve by split'n'merge of components? # if so, keep backup copy # modify components # run partial EM on changeable components # NOTE: for a partial run, we'd only need the change to Log_S from the # changeable components. However, the neighborhoods can change from _update_snm # or because they move, so that operation is ill-defined. # Thus, we'll always run a full E-step, which is pretty cheap for # converged neighborhood. # The M-step could in principle be run on the changeable components only, # but there seem to be side effects in what I've tried. # Similar to the E-step, the imputation step needs to be run on all # components, otherwise the contribution of the changeable ones to the mixture # would be over-estimated. # Effectively, partial runs are as expensive as full runs. # revert to backup # run EM sequence # compute effective cutoff for chi2 in D dimensions # note: subsequently the cutoff parameter, e.g. in _E(), refers to this: # chi2 < cutoff, # while in fit() it means e.g. "cut at 3 sigma". # These differing conventions need to be documented well. # store chi2 cutoff for component shifts, use 0.5 sigma # save backup # size of original (unobscured) data set (signal and background) # size of imputed signal sample # limit loop in case of slow convergence # check if component has moved by more than sigma/2 # convergence tests # force update to U for all moved components # update all important _ quantities for convergence test(s) # backup to see if components move or if next step gets worse # note: not gmm = gmm_ ! # run one EM step # NOTE: T_inv (in fact (T_ik)^-1 for all samples i and components k) # is very large and is unfortunately duplicated in the parallelized _Mstep. # If memory is too limited, one can recompute T_inv in _Msums() instead. # here the magic happens: imputation from the current model # if there are projections / missing data, we don't know how to # generate those for the imputation samples # NOTE: in principle, if there are only missing data, i.e. R is 1_D, # we could ignore missingness for data2 because we'll do an analytic # marginalization. This doesn't work if R is a non-trivial matrix. # create fake data with same mechanism as the original data, # but invert selection to get the missing part # normalize for oversampling # need floating point precision in update # check if components have outside selection # correct the observed likelihood for the overall normalization constant of # of the data process with selection: # logL(x | gmm) = sum_k p_k(x) / Z(gmm), with Z(gmm) = int dx sum_k p_k(x) = 1 # becomes # logL(x | gmm) = sum_k Omega(x) p_k(x) / Z'(gmm), # with Z'(gmm) = int dx Omega(x) sum_k p_k(x), which we can gt by MC integration # perform E step calculations. 
# If cutoff is set, this will also set the neighborhoods U # compute p(i | k) for each k independently in the pool # need S = sum_k p(i | k) for further calculation # H = {i | i in neighborhood[k]} for any k, needed for outliers below # TODO: Use only when cutoff is set # actually S, not logS # This is the zeroth moment of a truncated Normal error distribution # Its calculation is simple only of the covariance is diagonal! # See e.g. Manjunath & Wilhem (2012) if not # one-for-all # CAUTION: The erf is approximate and returns 0 # Thus, we don't add the logs but multiple the value itself # underrun is not a big problem here # need log(S), but since log(0) isn't a good idea, need to restrict to H # compute chi^2, and apply selections on component neighborhood based in chi^2 # since U_k could be None, need explicit reshape # one-for-all # each datum has covariance # p(x | k) for all x in the vicinity of k # determine all points within cutoff sigma from mean[k] # with data errors: need to create and return T_ik = covar_i + C_k # and weight each datum appropriately # need to project out missing elements: T_ik = R_i C_k R_i^R + covar_i # NOTE: close to convergence, we could stop applying the cutoff because # changes to U will be minimal # prevent tiny negative determinants to mess up # since det(T^-1) = 1/det(T) # get zeroth, first, second moments of the data weighted with p_k(x) avgd over x # save the M sums from observed data # sum for amplitudes # ... means # ... covariances # perform sums for M step in the pool # NOTE: in a partial run, could work on changeable components only; # however, there seem to be side effects or race conditions # equivalent to A_k in _Msums, but done without logs # compute moments for the Mstep # get log_q_ik by dividing with S = sum_k p_ik # NOTE: this modifies log_p_k in place, but is only relevant # within this method since the call is parallel and its arguments # therefore don't get updated across components. # NOTE: reshape needed when U_k is None because of its # implicit meaning as np.newaxis # amplitude: A_k = sum_i q_ik # in fact: q_ik, but we treat sample index i silently everywhere # data with errors? # mean: M_k = sum_i x_i q_ik # covariance: C_k = sum_i (x_i - mu_k)^T(x_i - mu_k) q_ik # funny way of saying: for each point i, do the outer product # of d_m with its transpose, multiply with pi[i], and sum over i # that means T_ik is not None # b_ik = mu_k + C_k T_ik^-1 (x_i - mu_k) # B_ik = C_k - C_k T_ik^-1 C_k # F_ik = C_k R_i^T T_ik^-1 #b_k = gmm.mean[k] + np.einsum('ij,...jk,...k', gmm.covar[k], T_inv_k, d_m) #B_k = gmm.covar[k] - np.einsum('ij,...jk,...kl', gmm.covar[k], T_inv_k, gmm.covar[k]) # update component with the moment matrices. # If changeable is set, update only those components and renormalize the amplitudes # recompute background amplitude # amp update: # for partial update: need to update amp for any component that is changeable # it's a slice(None), not a bool array # Bovy eq. 31, with correction for bg.amp if needed # mean updateL # covar updateL # minimum covariance term? # we assume w to be a lower bound of the isotropic dispersion, # C_k = w^2 I + ... # then eq. 38 in Bovy et al. only ~works for N = 0 because of the # prefactor 1 / (q_j + 1) = 1 / (A + 1) in our terminology # On average, q_j = N/K, so we'll adopt that to correct. 
# draw from the model (+ background) and apply appropriate covariances # draw sample from model, or from background+model # model is GMM + Background # add noise # NOTE: When background is set, adding noise is problematic if # scattering them out is more likely than in. # This can be avoided when the background footprint is large compared to # selection region # one-for-all # create noise from unit covariance and then dot with eigenvalue # decomposition of covar2 to get a the right noise distribution: # n' = R V^1/2 n, where covar = R V R^-1 # faster than drawing one sample per each covariance # to prevent univariate errors to underflow Draw from the GMM (and the Background) with noise and selection. Draws orig_size samples from the GMM and the Background, if set; calls covar_callback if set and applies resulting covariances; the calls sel_callback on the (noisy) samples and returns those matching ones. If the number is resulting samples is inconsistent with obs_size, i.e. outside of the 68 percent confidence limit of a Poisson draw, it will update its estimate for the original sample size orig_size. An estimate can be provided with orig_size, otherwise it will use obs_size. Note: If sel_callback is set, the number of returned samples is not necessarily given by obs_size. Args: gmm: an instance if GMM obs_size (int): number of observed samples sel_callback: completeness callback to generate imputation samples. invert_sel (bool): whether to invert the result of sel_callback orig_size (int): an estimate of the original size of the sample. background: an instance of Background covar_callback: covariance callback for imputation samples. rng: numpy.random.RandomState for deterministic behavior Returns: sample: nunmpy array (N_orig, D) covar_sample: numpy array (N_orig, D, D) or None of covar_callback=None N_orig (int): updated estimate of orig_size if sel_callback is set Throws: RuntimeError for inconsistent argument combinations # draw from model (with background) and add noise. # TODO: may want to decide whether to add noise before selection or after # Here we do noise, then selection, but this is not fundamental # apply selection # check if predicted observed size is consistent with observed data # 68% confidence interval for Poisson variate: observed size # compute Kullback-Leiber divergence # find those components that are most similar # compute log_q (posterior for k given i), but use normalized probabilities # to allow for merging of empty components # don't need diagonal (can merge), and JM is symmetric # get index list for intersection of U of k and l # FIXME: match1d fails if either U is empty # SOLUTION: merge empty U, split another # if all Us are disjunct, JM is blank and merge_jk = [0,0] # merge two smallest components and clean up from the bottom # split the one whose p(x|k) deviate most from current Gaussian # ask for the three worst components to avoid split being in merge_jk JS = np.empty(gmm.K) k = 0 A = gmm.amp * N for JS[k] in \ parmap.map(_JS, xrange(gmm.K), gmm, log_p, log_S, U, A, pool=pool, chunksize=chunksize): k += 1 # get largest Eigenvalue, weighed by amplitude # Large EV implies extended object, which often is caused by coverving # multiple clusters. This happes also for almost empty components, which # should rather be merged than split, hence amplitude weights. 
# TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered # check that the three indices are unique # reconstruct A from gmm.amp # update parameters and U # merge 0 and 1, store in 0, Bovy eq. 39 # if we're cleaning up the weakest components: # merging does not lead to valid component parameters as the original # ones can be anywhere. Simply adopt second one. # split 2, store in 1 and 2 # following SVD method in Zhang 2003, with alpha=1/2, u = 1/4 # TODO: replace with linalg.eigvalsh, but eigenvalues are not always ordered # now 1 and 2 have same U # L-fold cross-validation of the fit function. # all parameters for fit must be supplied with kwargs. # the rng seed will be fixed for the CV runs so that all random effects are the # same for each run. # CV and stacking can't have probabilistic inits that depends on # data or subsets thereof # make sure we know what the RNG is, # fix state of RNG to make behavior of fit reproducable # need to copy the gmm when init_cb is None # otherwise runs start from different init positions # same for bg if present # to L-fold CV here, need to split covar too if set # undo for consistency # build stacked model by combining all gmms and applying weights to amps # run CV to get cross-validation likelihood # run normal fit on all data # determine the weights that maximize the stacked estimator likelihood # run a tiny EM on lcvs to get them | 3.039085 | 3 |
setup.py | quiqueporta/bendercito | 2 | 6631772 | from setuptools import setup
from bendercito import __version__
setup(name='bendercito',
version=__version__,
description="Change your Slack status from command-line.",
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
keywords='',
author=u'<NAME>',
author_email='<EMAIL>',
url='https://github.com/quiqueporta/bendercito',
download_url='https://github.com/quiqueporta/bendercito/releases',
license='MIT',
packages=['bendercito'],
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'bendercito = bendercito.bendercito:main'
]
})
| from setuptools import setup
from bendercito import __version__
setup(name='bendercito',
version=__version__,
description="Change your Slack status from command-line.",
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'
],
keywords='',
author=u'<NAME>',
author_email='<EMAIL>',
url='https://github.com/quiqueporta/bendercito',
download_url='https://github.com/quiqueporta/bendercito/releases',
license='MIT',
packages=['bendercito'],
include_package_data=True,
zip_safe=False,
install_requires=[line for line in open('requirements.txt')],
entry_points={
'console_scripts': [
'bendercito = bendercito.bendercito:main'
]
})
| none | 1 | 1.545923 | 2 |
|
msgraph-cli-extensions/beta/education_beta/azext_education_beta/generated/commands.py | thewahome/msgraph-cli | 0 | 6631773 | <reponame>thewahome/msgraph-cli
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
# pylint: disable=bad-continuation
# pylint: disable=line-too-long
from msgraph.cli.core.commands import CliCommandType
from azext_education_beta.generated._client_factory import (
cf_education_education_root,
cf_education,
cf_education_class,
cf_education_class_assignment,
cf_education_class_assignment_submission,
cf_education_class_member,
cf_education_class_school,
cf_education_class_teacher,
cf_education_me,
cf_education_me_assignment,
cf_education_me_assignment_submission,
cf_education_me_class,
cf_education_me_school,
cf_education_me_taught_class,
cf_education_school,
cf_education_school_class,
cf_education_school_user,
cf_education_synchronization_profile,
cf_education_user,
cf_education_user_assignment,
cf_education_user_assignment_submission,
cf_education_user_class,
cf_education_user_school,
cf_education_user_taught_class,
)
education_beta_education_education_root = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_education_root_operations#EducationEducationRootOperations.{}',
client_factory=cf_education_education_root,
)
education_beta_education = CliCommandType(
operations_tmpl=(
'azext_education_beta.vendored_sdks.education.operations._education_operations#EducationOperations.{}'
),
client_factory=cf_education,
)
education_beta_education_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_operations#EducationClassesOperations.{}',
client_factory=cf_education_class,
)
education_beta_education_class_assignment = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_assignments_operations#EducationClassesAssignmentsOperations.{}',
client_factory=cf_education_class_assignment,
)
education_beta_education_class_assignment_submission = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_assignments_submissions_operations#EducationClassesAssignmentsSubmissionsOperations.{}',
client_factory=cf_education_class_assignment_submission,
)
education_beta_education_class_member = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_members_operations#EducationClassesMembersOperations.{}',
client_factory=cf_education_class_member,
)
education_beta_education_class_school = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_schools_operations#EducationClassesSchoolsOperations.{}',
client_factory=cf_education_class_school,
)
education_beta_education_class_teacher = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_classes_teachers_operations#EducationClassesTeachersOperations.{}',
client_factory=cf_education_class_teacher,
)
education_beta_education_me = CliCommandType(
operations_tmpl=(
'azext_education_beta.vendored_sdks.education.operations._education_me_operations#EducationMeOperations.{}'
),
client_factory=cf_education_me,
)
education_beta_education_me_assignment = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_me_assignments_operations#EducationMeAssignmentsOperations.{}',
client_factory=cf_education_me_assignment,
)
education_beta_education_me_assignment_submission = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_me_assignments_submissions_operations#EducationMeAssignmentsSubmissionsOperations.{}',
client_factory=cf_education_me_assignment_submission,
)
education_beta_education_me_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_me_classes_operations#EducationMeClassesOperations.{}',
client_factory=cf_education_me_class,
)
education_beta_education_me_school = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_me_schools_operations#EducationMeSchoolsOperations.{}',
client_factory=cf_education_me_school,
)
education_beta_education_me_taught_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_me_taught_classes_operations#EducationMeTaughtClassesOperations.{}',
client_factory=cf_education_me_taught_class,
)
education_beta_education_school = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_schools_operations#EducationSchoolsOperations.{}',
client_factory=cf_education_school,
)
education_beta_education_school_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_schools_classes_operations#EducationSchoolsClassesOperations.{}',
client_factory=cf_education_school_class,
)
education_beta_education_school_user = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_schools_users_operations#EducationSchoolsUsersOperations.{}',
client_factory=cf_education_school_user,
)
education_beta_education_synchronization_profile = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_synchronization_profiles_operations#EducationSynchronizationProfilesOperations.{}',
client_factory=cf_education_synchronization_profile,
)
education_beta_education_user = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_operations#EducationUsersOperations.{}',
client_factory=cf_education_user,
)
education_beta_education_user_assignment = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_assignments_operations#EducationUsersAssignmentsOperations.{}',
client_factory=cf_education_user_assignment,
)
education_beta_education_user_assignment_submission = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_assignments_submissions_operations#EducationUsersAssignmentsSubmissionsOperations.{}',
client_factory=cf_education_user_assignment_submission,
)
education_beta_education_user_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_classes_operations#EducationUsersClassesOperations.{}',
client_factory=cf_education_user_class,
)
education_beta_education_user_school = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_schools_operations#EducationUsersSchoolsOperations.{}',
client_factory=cf_education_user_school,
)
education_beta_education_user_taught_class = CliCommandType(
operations_tmpl='azext_education_beta.vendored_sdks.education.operations._education_users_taught_classes_operations#EducationUsersTaughtClassesOperations.{}',
client_factory=cf_education_user_taught_class,
)
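

# Called by the CLI loader to register every education command group. Each
# g.custom_command(name, func_name) call binds a CLI sub-command to a wrapper function of
# that name, typically resolved from the extension's custom/generated command module; the
# CliCommandType passed to the group supplies the SDK operations template and client factory.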
def load_command_table(self, _):
with self.command_group(
'education education-root', education_beta_education_education_root, client_factory=cf_education_education_root
) as g:
g.custom_command('show-education-root', 'education_education_root_show_education_root')
g.custom_command('update-education-root', 'education_education_root_update_education_root')
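    # As a hedged illustration, the registration above is expected to surface roughly as
    #   <cli> education education-root show-education-root
    # dispatching to education_education_root_show_education_root in the extension's custom
    # module; the exact binary name and module path depend on the host CLI configuration.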
with self.command_group('education education', education_beta_education, client_factory=cf_education) as g:
g.custom_command('create-class', 'education_education_create_class')
g.custom_command('create-school', 'education_education_create_school')
g.custom_command('create-synchronization-profile', 'education_education_create_synchronization_profile')
g.custom_command('create-user', 'education_education_create_user')
g.custom_command('delete-class', 'education_education_delete_class')
g.custom_command('delete-me', 'education_education_delete_me')
g.custom_command('delete-school', 'education_education_delete_school')
g.custom_command('delete-synchronization-profile', 'education_education_delete_synchronization_profile')
g.custom_command('delete-user', 'education_education_delete_user')
g.custom_command('list-class', 'education_education_list_class')
g.custom_command('list-school', 'education_education_list_school')
g.custom_command('list-synchronization-profile', 'education_education_list_synchronization_profile')
g.custom_command('list-user', 'education_education_list_user')
g.custom_command('show-class', 'education_education_show_class')
g.custom_command('show-me', 'education_education_show_me')
g.custom_command('show-school', 'education_education_show_school')
g.custom_command('show-synchronization-profile', 'education_education_show_synchronization_profile')
g.custom_command('show-user', 'education_education_show_user')
g.custom_command('update-class', 'education_education_update_class')
g.custom_command('update-me', 'education_education_update_me')
g.custom_command('update-school', 'education_education_update_school')
g.custom_command('update-synchronization-profile', 'education_education_update_synchronization_profile')
g.custom_command('update-user', 'education_education_update_user')
with self.command_group(
'education education-class', education_beta_education_class, client_factory=cf_education_class
) as g:
g.custom_command('create-assignment', 'education_education_class_create_assignment')
g.custom_command('create-assignment-category', 'education_education_class_create_assignment_category')
g.custom_command('create-ref-member', 'education_education_class_create_ref_member')
g.custom_command('create-ref-school', 'education_education_class_create_ref_school')
g.custom_command('create-ref-teacher', 'education_education_class_create_ref_teacher')
g.custom_command('delete-assignment', 'education_education_class_delete_assignment')
g.custom_command('delete-assignment-category', 'education_education_class_delete_assignment_category')
g.custom_command('delete-ref-group', 'education_education_class_delete_ref_group')
g.custom_command('delta', 'education_education_class_delta')
g.custom_command('list-assignment', 'education_education_class_list_assignment')
g.custom_command('list-assignment-category', 'education_education_class_list_assignment_category')
g.custom_command('list-member', 'education_education_class_list_member')
g.custom_command('list-ref-member', 'education_education_class_list_ref_member')
g.custom_command('list-ref-school', 'education_education_class_list_ref_school')
g.custom_command('list-ref-teacher', 'education_education_class_list_ref_teacher')
g.custom_command('list-school', 'education_education_class_list_school')
g.custom_command('list-teacher', 'education_education_class_list_teacher')
g.custom_command('set-ref-group', 'education_education_class_set_ref_group')
g.custom_command('show-assignment', 'education_education_class_show_assignment')
g.custom_command('show-assignment-category', 'education_education_class_show_assignment_category')
g.custom_command('show-group', 'education_education_class_show_group')
g.custom_command('show-ref-group', 'education_education_class_show_ref_group')
g.custom_command('update-assignment', 'education_education_class_update_assignment')
g.custom_command('update-assignment-category', 'education_education_class_update_assignment_category')
with self.command_group(
'education education-class-assignment',
education_beta_education_class_assignment,
client_factory=cf_education_class_assignment,
) as g:
g.custom_command('create-category', 'education_education_class_assignment_create_category')
g.custom_command('create-resource', 'education_education_class_assignment_create_resource')
g.custom_command('create-submission', 'education_education_class_assignment_create_submission')
g.custom_command('delete-category', 'education_education_class_assignment_delete_category')
g.custom_command('delete-resource', 'education_education_class_assignment_delete_resource')
g.custom_command('delete-rubric', 'education_education_class_assignment_delete_rubric')
g.custom_command('delete-submission', 'education_education_class_assignment_delete_submission')
g.custom_command('list-category', 'education_education_class_assignment_list_category')
g.custom_command('list-resource', 'education_education_class_assignment_list_resource')
g.custom_command('list-submission', 'education_education_class_assignment_list_submission')
g.custom_command('publish', 'education_education_class_assignment_publish')
g.custom_command('show-category', 'education_education_class_assignment_show_category')
g.custom_command('show-resource', 'education_education_class_assignment_show_resource')
g.custom_command('show-resource-folder-url', 'education_education_class_assignment_show_resource_folder_url')
g.custom_command('show-rubric', 'education_education_class_assignment_show_rubric')
g.custom_command('show-submission', 'education_education_class_assignment_show_submission')
g.custom_command('update-category', 'education_education_class_assignment_update_category')
g.custom_command('update-resource', 'education_education_class_assignment_update_resource')
g.custom_command('update-rubric', 'education_education_class_assignment_update_rubric')
g.custom_command('update-submission', 'education_education_class_assignment_update_submission')
with self.command_group(
'education education-class-assignment-submission',
education_beta_education_class_assignment_submission,
client_factory=cf_education_class_assignment_submission,
) as g:
g.custom_command('create-outcome', 'education_education_class_assignment_submission_create_outcome')
g.custom_command('create-resource', 'education_education_class_assignment_submission_create_resource')
g.custom_command(
'create-submitted-resource', 'education_education_class_assignment_submission_create_submitted_resource'
)
g.custom_command('delete-outcome', 'education_education_class_assignment_submission_delete_outcome')
g.custom_command('delete-resource', 'education_education_class_assignment_submission_delete_resource')
g.custom_command(
'delete-submitted-resource', 'education_education_class_assignment_submission_delete_submitted_resource'
)
g.custom_command('list-outcome', 'education_education_class_assignment_submission_list_outcome')
g.custom_command('list-resource', 'education_education_class_assignment_submission_list_resource')
g.custom_command(
'list-submitted-resource', 'education_education_class_assignment_submission_list_submitted_resource'
)
g.custom_command('return', 'education_education_class_assignment_submission_return')
g.custom_command('show-outcome', 'education_education_class_assignment_submission_show_outcome')
g.custom_command('show-resource', 'education_education_class_assignment_submission_show_resource')
g.custom_command(
'show-submitted-resource', 'education_education_class_assignment_submission_show_submitted_resource'
)
g.custom_command('submit', 'education_education_class_assignment_submission_submit')
g.custom_command('unsubmit', 'education_education_class_assignment_submission_unsubmit')
g.custom_command('update-outcome', 'education_education_class_assignment_submission_update_outcome')
g.custom_command('update-resource', 'education_education_class_assignment_submission_update_resource')
g.custom_command(
'update-submitted-resource', 'education_education_class_assignment_submission_update_submitted_resource'
)
with self.command_group(
'education education-class-member',
education_beta_education_class_member,
client_factory=cf_education_class_member,
) as g:
g.custom_command('delta', 'education_education_class_member_delta')
with self.command_group(
'education education-class-school',
education_beta_education_class_school,
client_factory=cf_education_class_school,
) as g:
g.custom_command('delta', 'education_education_class_school_delta')
with self.command_group(
'education education-class-teacher',
education_beta_education_class_teacher,
client_factory=cf_education_class_teacher,
) as g:
g.custom_command('delta', 'education_education_class_teacher_delta')
with self.command_group('education education-me', education_beta_education_me, client_factory=cf_education_me) as g:
g.custom_command('create-assignment', 'education_education_me_create_assignment')
g.custom_command('create-ref-class', 'education_education_me_create_ref_class')
g.custom_command('create-ref-school', 'education_education_me_create_ref_school')
g.custom_command('create-ref-taught-class', 'education_education_me_create_ref_taught_class')
g.custom_command('create-rubric', 'education_education_me_create_rubric')
g.custom_command('delete-assignment', 'education_education_me_delete_assignment')
g.custom_command('delete-ref-user', 'education_education_me_delete_ref_user')
g.custom_command('delete-rubric', 'education_education_me_delete_rubric')
g.custom_command('list-assignment', 'education_education_me_list_assignment')
g.custom_command('list-class', 'education_education_me_list_class')
g.custom_command('list-ref-class', 'education_education_me_list_ref_class')
g.custom_command('list-ref-school', 'education_education_me_list_ref_school')
g.custom_command('list-ref-taught-class', 'education_education_me_list_ref_taught_class')
g.custom_command('list-rubric', 'education_education_me_list_rubric')
g.custom_command('list-school', 'education_education_me_list_school')
g.custom_command('list-taught-class', 'education_education_me_list_taught_class')
g.custom_command('set-ref-user', 'education_education_me_set_ref_user')
g.custom_command('show-assignment', 'education_education_me_show_assignment')
g.custom_command('show-ref-user', 'education_education_me_show_ref_user')
g.custom_command('show-rubric', 'education_education_me_show_rubric')
g.custom_command('show-user', 'education_education_me_show_user')
g.custom_command('update-assignment', 'education_education_me_update_assignment')
g.custom_command('update-rubric', 'education_education_me_update_rubric')
with self.command_group(
'education education-me-assignment',
education_beta_education_me_assignment,
client_factory=cf_education_me_assignment,
) as g:
g.custom_command('create-category', 'education_education_me_assignment_create_category')
g.custom_command('create-resource', 'education_education_me_assignment_create_resource')
g.custom_command('create-submission', 'education_education_me_assignment_create_submission')
g.custom_command('delete-category', 'education_education_me_assignment_delete_category')
g.custom_command('delete-resource', 'education_education_me_assignment_delete_resource')
g.custom_command('delete-rubric', 'education_education_me_assignment_delete_rubric')
g.custom_command('delete-submission', 'education_education_me_assignment_delete_submission')
g.custom_command('list-category', 'education_education_me_assignment_list_category')
g.custom_command('list-resource', 'education_education_me_assignment_list_resource')
g.custom_command('list-submission', 'education_education_me_assignment_list_submission')
g.custom_command('publish', 'education_education_me_assignment_publish')
g.custom_command('show-category', 'education_education_me_assignment_show_category')
g.custom_command('show-resource', 'education_education_me_assignment_show_resource')
g.custom_command('show-resource-folder-url', 'education_education_me_assignment_show_resource_folder_url')
g.custom_command('show-rubric', 'education_education_me_assignment_show_rubric')
g.custom_command('show-submission', 'education_education_me_assignment_show_submission')
g.custom_command('update-category', 'education_education_me_assignment_update_category')
g.custom_command('update-resource', 'education_education_me_assignment_update_resource')
g.custom_command('update-rubric', 'education_education_me_assignment_update_rubric')
g.custom_command('update-submission', 'education_education_me_assignment_update_submission')
with self.command_group(
'education education-me-assignment-submission',
education_beta_education_me_assignment_submission,
client_factory=cf_education_me_assignment_submission,
) as g:
g.custom_command('create-outcome', 'education_education_me_assignment_submission_create_outcome')
g.custom_command('create-resource', 'education_education_me_assignment_submission_create_resource')
g.custom_command(
'create-submitted-resource', 'education_education_me_assignment_submission_create_submitted_resource'
)
g.custom_command('delete-outcome', 'education_education_me_assignment_submission_delete_outcome')
g.custom_command('delete-resource', 'education_education_me_assignment_submission_delete_resource')
g.custom_command(
'delete-submitted-resource', 'education_education_me_assignment_submission_delete_submitted_resource'
)
g.custom_command('list-outcome', 'education_education_me_assignment_submission_list_outcome')
g.custom_command('list-resource', 'education_education_me_assignment_submission_list_resource')
g.custom_command(
'list-submitted-resource', 'education_education_me_assignment_submission_list_submitted_resource'
)
g.custom_command('return', 'education_education_me_assignment_submission_return')
g.custom_command('show-outcome', 'education_education_me_assignment_submission_show_outcome')
g.custom_command('show-resource', 'education_education_me_assignment_submission_show_resource')
g.custom_command(
'show-submitted-resource', 'education_education_me_assignment_submission_show_submitted_resource'
)
g.custom_command('submit', 'education_education_me_assignment_submission_submit')
g.custom_command('unsubmit', 'education_education_me_assignment_submission_unsubmit')
g.custom_command('update-outcome', 'education_education_me_assignment_submission_update_outcome')
g.custom_command('update-resource', 'education_education_me_assignment_submission_update_resource')
g.custom_command(
'update-submitted-resource', 'education_education_me_assignment_submission_update_submitted_resource'
)
with self.command_group(
'education education-me-class', education_beta_education_me_class, client_factory=cf_education_me_class
) as g:
g.custom_command('delta', 'education_education_me_class_delta')
with self.command_group(
'education education-me-school', education_beta_education_me_school, client_factory=cf_education_me_school
) as g:
g.custom_command('delta', 'education_education_me_school_delta')
with self.command_group(
'education education-me-taught-class',
education_beta_education_me_taught_class,
client_factory=cf_education_me_taught_class,
) as g:
g.custom_command('delta', 'education_education_me_taught_class_delta')
with self.command_group(
'education education-school', education_beta_education_school, client_factory=cf_education_school
) as g:
g.custom_command('create-ref-class', 'education_education_school_create_ref_class')
g.custom_command('create-ref-user', 'education_education_school_create_ref_user')
g.custom_command('delete-ref-administrative-unit', 'education_education_school_delete_ref_administrative_unit')
g.custom_command('delta', 'education_education_school_delta')
g.custom_command('list-class', 'education_education_school_list_class')
g.custom_command('list-ref-class', 'education_education_school_list_ref_class')
g.custom_command('list-ref-user', 'education_education_school_list_ref_user')
g.custom_command('list-user', 'education_education_school_list_user')
g.custom_command('set-ref-administrative-unit', 'education_education_school_set_ref_administrative_unit')
g.custom_command('show-administrative-unit', 'education_education_school_show_administrative_unit')
g.custom_command('show-ref-administrative-unit', 'education_education_school_show_ref_administrative_unit')
with self.command_group(
'education education-school-class',
education_beta_education_school_class,
client_factory=cf_education_school_class,
) as g:
g.custom_command('delta', 'education_education_school_class_delta')
with self.command_group(
'education education-school-user', education_beta_education_school_user, client_factory=cf_education_school_user
) as g:
g.custom_command('delta', 'education_education_school_user_delta')
with self.command_group(
'education education-synchronization-profile',
education_beta_education_synchronization_profile,
client_factory=cf_education_synchronization_profile,
) as g:
g.custom_command('create-error', 'education_education_synchronization_profile_create_error')
g.custom_command('delete-error', 'education_education_synchronization_profile_delete_error')
g.custom_command('delete-profile-status', 'education_education_synchronization_profile_delete_profile_status')
g.custom_command('list-error', 'education_education_synchronization_profile_list_error')
g.custom_command('pause', 'education_education_synchronization_profile_pause')
g.custom_command('reset', 'education_education_synchronization_profile_reset')
g.custom_command('resume', 'education_education_synchronization_profile_resume')
g.custom_command('show-error', 'education_education_synchronization_profile_show_error')
g.custom_command('show-profile-status', 'education_education_synchronization_profile_show_profile_status')
g.custom_command('start', 'education_education_synchronization_profile_start')
g.custom_command('update-error', 'education_education_synchronization_profile_update_error')
g.custom_command('update-profile-status', 'education_education_synchronization_profile_update_profile_status')
g.custom_command('upload-url', 'education_education_synchronization_profile_upload_url')
with self.command_group(
'education education-user', education_beta_education_user, client_factory=cf_education_user
) as g:
g.custom_command('create-assignment', 'education_education_user_create_assignment')
g.custom_command('create-ref-class', 'education_education_user_create_ref_class')
g.custom_command('create-ref-school', 'education_education_user_create_ref_school')
g.custom_command('create-ref-taught-class', 'education_education_user_create_ref_taught_class')
g.custom_command('create-rubric', 'education_education_user_create_rubric')
g.custom_command('delete-assignment', 'education_education_user_delete_assignment')
g.custom_command('delete-ref-user', 'education_education_user_delete_ref_user')
g.custom_command('delete-rubric', 'education_education_user_delete_rubric')
g.custom_command('delta', 'education_education_user_delta')
g.custom_command('list-assignment', 'education_education_user_list_assignment')
g.custom_command('list-class', 'education_education_user_list_class')
g.custom_command('list-ref-class', 'education_education_user_list_ref_class')
g.custom_command('list-ref-school', 'education_education_user_list_ref_school')
g.custom_command('list-ref-taught-class', 'education_education_user_list_ref_taught_class')
g.custom_command('list-rubric', 'education_education_user_list_rubric')
g.custom_command('list-school', 'education_education_user_list_school')
g.custom_command('list-taught-class', 'education_education_user_list_taught_class')
g.custom_command('set-ref-user', 'education_education_user_set_ref_user')
g.custom_command('show-assignment', 'education_education_user_show_assignment')
g.custom_command('show-ref-user', 'education_education_user_show_ref_user')
g.custom_command('show-rubric', 'education_education_user_show_rubric')
g.custom_command('show-user', 'education_education_user_show_user')
g.custom_command('update-assignment', 'education_education_user_update_assignment')
g.custom_command('update-rubric', 'education_education_user_update_rubric')
with self.command_group(
'education education-user-assignment',
education_beta_education_user_assignment,
client_factory=cf_education_user_assignment,
) as g:
g.custom_command('create-category', 'education_education_user_assignment_create_category')
g.custom_command('create-resource', 'education_education_user_assignment_create_resource')
g.custom_command('create-submission', 'education_education_user_assignment_create_submission')
g.custom_command('delete-category', 'education_education_user_assignment_delete_category')
g.custom_command('delete-resource', 'education_education_user_assignment_delete_resource')
g.custom_command('delete-rubric', 'education_education_user_assignment_delete_rubric')
g.custom_command('delete-submission', 'education_education_user_assignment_delete_submission')
g.custom_command('list-category', 'education_education_user_assignment_list_category')
g.custom_command('list-resource', 'education_education_user_assignment_list_resource')
g.custom_command('list-submission', 'education_education_user_assignment_list_submission')
g.custom_command('publish', 'education_education_user_assignment_publish')
g.custom_command('show-category', 'education_education_user_assignment_show_category')
g.custom_command('show-resource', 'education_education_user_assignment_show_resource')
g.custom_command('show-resource-folder-url', 'education_education_user_assignment_show_resource_folder_url')
g.custom_command('show-rubric', 'education_education_user_assignment_show_rubric')
g.custom_command('show-submission', 'education_education_user_assignment_show_submission')
g.custom_command('update-category', 'education_education_user_assignment_update_category')
g.custom_command('update-resource', 'education_education_user_assignment_update_resource')
g.custom_command('update-rubric', 'education_education_user_assignment_update_rubric')
g.custom_command('update-submission', 'education_education_user_assignment_update_submission')
with self.command_group(
'education education-user-assignment-submission',
education_beta_education_user_assignment_submission,
client_factory=cf_education_user_assignment_submission,
) as g:
g.custom_command('create-outcome', 'education_education_user_assignment_submission_create_outcome')
g.custom_command('create-resource', 'education_education_user_assignment_submission_create_resource')
g.custom_command(
'create-submitted-resource', 'education_education_user_assignment_submission_create_submitted_resource'
)
g.custom_command('delete-outcome', 'education_education_user_assignment_submission_delete_outcome')
g.custom_command('delete-resource', 'education_education_user_assignment_submission_delete_resource')
g.custom_command(
'delete-submitted-resource', 'education_education_user_assignment_submission_delete_submitted_resource'
)
g.custom_command('list-outcome', 'education_education_user_assignment_submission_list_outcome')
g.custom_command('list-resource', 'education_education_user_assignment_submission_list_resource')
g.custom_command(
'list-submitted-resource', 'education_education_user_assignment_submission_list_submitted_resource'
)
g.custom_command('return', 'education_education_user_assignment_submission_return')
g.custom_command('show-outcome', 'education_education_user_assignment_submission_show_outcome')
g.custom_command('show-resource', 'education_education_user_assignment_submission_show_resource')
g.custom_command(
'show-submitted-resource', 'education_education_user_assignment_submission_show_submitted_resource'
)
g.custom_command('submit', 'education_education_user_assignment_submission_submit')
g.custom_command('unsubmit', 'education_education_user_assignment_submission_unsubmit')
g.custom_command('update-outcome', 'education_education_user_assignment_submission_update_outcome')
g.custom_command('update-resource', 'education_education_user_assignment_submission_update_resource')
g.custom_command(
'update-submitted-resource', 'education_education_user_assignment_submission_update_submitted_resource'
)
with self.command_group(
'education education-user-class', education_beta_education_user_class, client_factory=cf_education_user_class
) as g:
g.custom_command('delta', 'education_education_user_class_delta')
with self.command_group(
'education education-user-school', education_beta_education_user_school, client_factory=cf_education_user_school
) as g:
g.custom_command('delta', 'education_education_user_school_delta')
with self.command_group(
'education education-user-taught-class',
education_beta_education_user_taught_class,
client_factory=cf_education_user_taught_class,
) as g:
g.custom_command('delta', 'education_education_user_taught_class_delta')
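    # Registering an otherwise empty 'education_beta' group with is_experimental=True marks the
    # extension as experimental, so the host CLI can surface an experimental-status warning for
    # its commands.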
with self.command_group('education_beta', is_experimental=True):
pass
'education education-user-taught-class',
education_beta_education_user_taught_class,
client_factory=cf_education_user_taught_class,
) as g:
g.custom_command('delta', 'education_education_user_taught_class_delta')
with self.command_group('education_beta', is_experimental=True):
pass | en | 0.585472 | # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- # pylint: disable=too-many-statements # pylint: disable=too-many-locals # pylint: disable=bad-continuation # pylint: disable=line-too-long #EducationEducationRootOperations.{}', #EducationOperations.{}' #EducationClassesOperations.{}', #EducationClassesAssignmentsOperations.{}', #EducationClassesAssignmentsSubmissionsOperations.{}', #EducationClassesMembersOperations.{}', #EducationClassesSchoolsOperations.{}', #EducationClassesTeachersOperations.{}', #EducationMeOperations.{}' #EducationMeAssignmentsOperations.{}', #EducationMeAssignmentsSubmissionsOperations.{}', #EducationMeClassesOperations.{}', #EducationMeSchoolsOperations.{}', #EducationMeTaughtClassesOperations.{}', #EducationSchoolsOperations.{}', #EducationSchoolsClassesOperations.{}', #EducationSchoolsUsersOperations.{}', #EducationSynchronizationProfilesOperations.{}', #EducationUsersOperations.{}', #EducationUsersAssignmentsOperations.{}', #EducationUsersAssignmentsSubmissionsOperations.{}', #EducationUsersClassesOperations.{}', #EducationUsersSchoolsOperations.{}', #EducationUsersTaughtClassesOperations.{}', | 1.414257 | 1 |
useful_layers/layers/__init__.py | jernsting/useful_layers | 0 | 6631774 | <reponame>jernsting/useful_layers
from useful_layers.layers.squeeze_and_excitation import *
from useful_layers.layers.channel_attention import *
from useful_layers.layers.spatial_attention import *
| from useful_layers.layers.squeeze_and_excitation import *
from useful_layers.layers.channel_attention import *
from useful_layers.layers.spatial_attention import * | none | 1 | 1.014967 | 1 |
|
tests/testapp/urls.py | Brown-University-Library/django-select2 | 0 | 6631775 | <gh_stars>0
# -*- coding:utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from .forms import (
AddressChainedSelect2WidgetForm, AlbumModelSelect2WidgetForm,
HeavySelect2MultipleWidgetForm, HeavySelect2WidgetForm,
ModelSelect2TagWidgetForm, Select2WidgetForm
)
from .views import TemplateFormView, heavy_data_1, heavy_data_2
urlpatterns = [
url(r'^select2_widget/$',
TemplateFormView.as_view(form_class=Select2WidgetForm), name='select2_widget'),
url(r'^heavy_select2_widget/$',
TemplateFormView.as_view(form_class=HeavySelect2WidgetForm), name='heavy_select2_widget'),
url(r'^heavy_select2_multiple_widget/$',
TemplateFormView.as_view(form_class=HeavySelect2MultipleWidgetForm, success_url='/'),
name='heavy_select2_multiple_widget'),
url(r'^model_select2_widget/$',
TemplateFormView.as_view(form_class=AlbumModelSelect2WidgetForm),
name='model_select2_widget'),
url(r'^model_select2_tag_widget/$',
TemplateFormView.as_view(form_class=ModelSelect2TagWidgetForm),
name='model_select2_tag_widget'),
url(r'^model_chained_select2_widget/$',
TemplateFormView.as_view(form_class=AddressChainedSelect2WidgetForm),
name='model_chained_select2_widget'),
url(r'^heavy_data_1/$', heavy_data_1, name='heavy_data_1'),
url(r'^heavy_data_2/$', heavy_data_2, name='heavy_data_2'),
url(r'^select2/', include('django_select2.urls')),
]
| # -*- coding:utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from .forms import (
AddressChainedSelect2WidgetForm, AlbumModelSelect2WidgetForm,
HeavySelect2MultipleWidgetForm, HeavySelect2WidgetForm,
ModelSelect2TagWidgetForm, Select2WidgetForm
)
from .views import TemplateFormView, heavy_data_1, heavy_data_2
urlpatterns = [
url(r'^select2_widget/$',
TemplateFormView.as_view(form_class=Select2WidgetForm), name='select2_widget'),
url(r'^heavy_select2_widget/$',
TemplateFormView.as_view(form_class=HeavySelect2WidgetForm), name='heavy_select2_widget'),
url(r'^heavy_select2_multiple_widget/$',
TemplateFormView.as_view(form_class=HeavySelect2MultipleWidgetForm, success_url='/'),
name='heavy_select2_multiple_widget'),
url(r'^model_select2_widget/$',
TemplateFormView.as_view(form_class=AlbumModelSelect2WidgetForm),
name='model_select2_widget'),
url(r'^model_select2_tag_widget/$',
TemplateFormView.as_view(form_class=ModelSelect2TagWidgetForm),
name='model_select2_tag_widget'),
url(r'^model_chained_select2_widget/$',
TemplateFormView.as_view(form_class=AddressChainedSelect2WidgetForm),
name='model_chained_select2_widget'),
url(r'^heavy_data_1/$', heavy_data_1, name='heavy_data_1'),
url(r'^heavy_data_2/$', heavy_data_2, name='heavy_data_2'),
url(r'^select2/', include('django_select2.urls')),
] | en | 0.910848 | # -*- conding:utf-8 -*- | 2.009156 | 2 |
tests/user_info/test_add_user_info.py | yulgul/api_tests_store | 0 | 6631776 | <filename>tests/user_info/test_add_user_info.py
from fixtures.constants import ResponseText
from fixtures.user_info.model import AddUserInfo, AddUserInfoResponse
class TestAddUserInfo:
def test_add_user_info_with_valid_data(self, app, auth_user):
"""
1. Try to add user info with valid data
2. Check that status code is 200
3. Check the response message
"""
data = AddUserInfo.random()
res = app.userinfo.add_info(
user_id=auth_user.uuid,
data=data,
header=auth_user.header,
type_response=AddUserInfoResponse,
)
assert res.status_code == 200
assert res.data.message == ResponseText.MESSAGE_ADD_USER_INFO
def test_add_created_user_info_with_valid_data(self, app, auth_user_uuid_19):
"""
1. Try to add user info for an already created user
2. Check that status code is 400
"""
data = AddUserInfo.random()
res = app.userinfo.add_info(
user_id=auth_user_uuid_19.uuid,
data=data,
header=auth_user_uuid_19.header,
type_response=AddUserInfoResponse,
)
assert res.status_code == 400
| <filename>tests/user_info/test_add_user_info.py
from fixtures.constants import ResponseText
from fixtures.user_info.model import AddUserInfo, AddUserInfoResponse
class TestAddUserInfo:
def test_add_user_info_with_valid_data(self, app, auth_user):
"""
1. Try to add user info with valid data
2. Check that status code is 200
3. Check the response message
"""
data = AddUserInfo.random()
res = app.userinfo.add_info(
user_id=auth_user.uuid,
data=data,
header=auth_user.header,
type_response=AddUserInfoResponse,
)
assert res.status_code == 200
assert res.data.message == ResponseText.MESSAGE_ADD_USER_INFO
def test_add_created_user_info_with_valid_data(self, app, auth_user_uuid_19):
"""
1. Try to add user info for an already created user
2. Check that status code is 400
"""
data = AddUserInfo.random()
res = app.userinfo.add_info(
user_id=auth_user_uuid_19.uuid,
data=data,
header=auth_user_uuid_19.header,
type_response=AddUserInfoResponse,
)
assert res.status_code == 400
| en | 0.81647 | 1. Try to register user with valid data 2. Check that status code is 201 3. Check response # 1. Try to register user with valid data 2. Check that status code is 201 3. Check response # | 2.804792 | 3 |
smrf/spatial/idw.py | scotthavens/smrf | 0 | 6631777 | <reponame>scotthavens/smrf
'''
2015-11-30 <NAME>
updated 2015-12-31 <NAME>
- start using panda dataframes to help keep track of stations
Distributed forcing data over a grid using different methods
'''
import numpy as np
class IDW:
'''
Inverse distance weighting class for distributing input data. Available
options are:
* Standard IDW
* Detrended IDW
'''
def __init__(self, mx, my, GridX, GridY, mz=None, GridZ=None,
power=2, zeroVal=-1):
"""
Args:
mx: x locations for the points
my: y locations for the points
GridX: x locations in grid to interpolate over
GridY: y locations in grid to interpolate over
mz: elevation for the points
GridZ: Elevation values for the points to interpolate over for
trended data
power: power of the inverse distance weighting
"""
# measurement point locations
self.mx = mx
self.my = my
self.mz = mz
self.npoints = len(mx)
# grid information
self.GridX = GridX
self.GridY = GridY
self.GridZ = GridZ
# data information
self.data = None
self.nan_val = []
# IDW parameters
self.power = power
self.zeroVal = zeroVal
# calculate the distances
self.calculateDistances()
# calculate the weights
self.calculateWeights()
def calculateDistances(self):
'''
Calculate the distances from the measurement locations to the
grid locations
'''
# preallocate
self.distance = np.empty((self.GridX.shape[0],
self.GridX.shape[1],
self.npoints))
for i in range(self.npoints):
self.distance[:, :, i] = np.sqrt((self.GridX - self.mx[i])**2 +
(self.GridY - self.my[i])**2)
# remove any zero values
self.distance[np.where(self.distance == 0)] = np.min(self.distance)
def calculateWeights(self):
'''
Calculate the weights for
'''
# calculate the weights
self.weights = 1.0/(np.power(self.distance, self.power))
# if there are Inf values, set to 1 as the distance was 0
# self.weights[np.isinf(self.weights)] = 100
def calculateIDW(self, data, local=False):
'''
Calculate the IDW of the data at mx,my over GridX,GridY
Inputs:
data - is the same size as mx,my
'''
nan_val = ~np.isnan(data)
w = self.weights[:, :, nan_val]
data = data[nan_val]
v = np.nansum(w * data, 2) / np.sum(w, 2)
return v
def detrendedIDW(self, data, flag=0, zeros=None, local=False):
'''
Calculate the detrended IDW of the data at mx,my over GridX,GridY
Inputs:
data - is the same size as mx,my
'''
self.detrendData(data, flag, zeros)
v = self.calculateIDW(self.dtrend, local)
# vtmp = v.copy()
v = self.retrendData(v)
if zeros is not None:
v[v < 0] = 0
return v
def detrendData(self, data, flag=0, zeros=None):
'''
Detrend the data in val using the heights zmeas
data - is the same size as mx,my
flag - 1 for positive, -1 for negative, 0 for any trend imposed
'''
# calculate the trend on any real data
nan_val = np.isnan(data)
pv = np.polyfit(self.mz[~nan_val], data[~nan_val], 1)
# apply trend constraints
if flag == 1 and pv[0] < 0:
pv = np.array([0, 0])
elif (flag == -1 and pv[0] > 0):
pv = np.array([0, 0])
self.pv = pv
# detrend the data
el_trend = self.mz * pv[0] + pv[1]
if zeros is not None:
data[zeros] = self.zeroVal
self.dtrend = data - el_trend
def retrendData(self, idw):
'''
Retrend the IDW values
'''
# retrend the data
return idw + self.pv[0]*self.GridZ + self.pv[1]
| '''
2015-11-30 <NAME>
updated 2015-12-31 <NAME>
- start using panda dataframes to help keep track of stations
Distributed forcing data over a grid using different methods
'''
import numpy as np
class IDW:
'''
Inverse distance weighting class for distributing input data. Available
options are:
* Standard IDW
* Detrended IDW
'''
def __init__(self, mx, my, GridX, GridY, mz=None, GridZ=None,
power=2, zeroVal=-1):
"""
Args:
mx: x locations for the points
my: y locations for the points
GridX: x locations in grid to interpolate over
GridY: y locations in grid to interpolate over
mz: elevation for the points
GridZ: Elevation values for the points to interpolate over for
trended data
power: power of the inverse distance weighting
"""
# measurement point locations
self.mx = mx
self.my = my
self.mz = mz
self.npoints = len(mx)
# grid information
self.GridX = GridX
self.GridY = GridY
self.GridZ = GridZ
# data information
self.data = None
self.nan_val = []
# IDW parameters
self.power = power
self.zeroVal = zeroVal
# calculate the distances
self.calculateDistances()
# calculate the weights
self.calculateWeights()
def calculateDistances(self):
'''
Calculate the distances from the measurement locations to the
grid locations
'''
# preallocate
self.distance = np.empty((self.GridX.shape[0],
self.GridX.shape[1],
self.npoints))
for i in range(self.npoints):
self.distance[:, :, i] = np.sqrt((self.GridX - self.mx[i])**2 +
(self.GridY - self.my[i])**2)
# remove any zero values
self.distance[np.where(self.distance == 0)] = np.min(self.distance)
def calculateWeights(self):
'''
Calculate the weights for
'''
# calculate the weights
self.weights = 1.0/(np.power(self.distance, self.power))
# if there are Inf values, set to 1 as the distance was 0
# self.weights[np.isinf(self.weights)] = 100
def calculateIDW(self, data, local=False):
'''
Calculate the IDW of the data at mx,my over GridX,GridY
Inputs:
data - is the same size as mx,my
'''
nan_val = ~np.isnan(data)
w = self.weights[:, :, nan_val]
data = data[nan_val]
v = np.nansum(w * data, 2) / np.sum(w, 2)
return v
def detrendedIDW(self, data, flag=0, zeros=None, local=False):
'''
Calculate the detrended IDW of the data at mx,my over GridX,GridY
Inputs:
data - is the same size as mx,my
'''
self.detrendData(data, flag, zeros)
v = self.calculateIDW(self.dtrend, local)
# vtmp = v.copy()
v = self.retrendData(v)
if zeros is not None:
v[v < 0] = 0
return v
def detrendData(self, data, flag=0, zeros=None):
'''
Detrend the data in val using the heights zmeas
data - is the same size as mx,my
flag - 1 for positive, -1 for negative, 0 for any trend imposed
'''
# calculate the trend on any real data
nan_val = np.isnan(data)
pv = np.polyfit(self.mz[~nan_val], data[~nan_val], 1)
# apply trend constraints
if flag == 1 and pv[0] < 0:
pv = np.array([0, 0])
elif (flag == -1 and pv[0] > 0):
pv = np.array([0, 0])
self.pv = pv
# detrend the data
el_trend = self.mz * pv[0] + pv[1]
if zeros is not None:
data[zeros] = self.zeroVal
self.dtrend = data - el_trend
def retrendData(self, idw):
'''
Retrend the IDW values
'''
# retrend the data
return idw + self.pv[0]*self.GridZ + self.pv[1] | en | 0.725034 | 2015-11-30 <NAME> updated 2015-12-31 <NAME> - start using panda dataframes to help keep track of stations Distributed forcing data over a grid using different methods Inverse distance weighting class for distributing input data. Availables options are: * Standard IDW * Detrended IDW Args: mx: x locations for the points my: y locations for the points GridX: x locations in grid to interpolate over GridY: y locations in grid to interpolate over mz: elevation for the points GridZ: Elevation values for the points to interpolate over for trended data power: power of the inverse distance weighting # measurement point locations # grid information # data information # IDW parameters # calculate the distances # calculate the weights Calculate the distances from the measurement locations to the grid locations # preallocate # remove any zero values Calculate the weights for # calculate the weights # if there are Inf values, set to 1 as the distance was 0 # self.weights[np.isinf(self.weights)] = 100 Calculate the IDW of the data at mx,my over GridX,GridY Inputs: data - is the same size at mx,my Calculate the detrended IDW of the data at mx,my over GridX,GridY Inputs: data - is the same size at mx,my # vtmp = v.copy() Detrend the data in val using the heights zmeas data - is the same size at mx,my flag - 1 for positive, -1 for negative, 0 for any trend imposed # calculate the trend on any real data # apply trend constraints # detrend the data Retrend the IDW values # retrend the data | 3.054581 | 3 |
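A minimal usage sketch for the IDW class above; the grid, station coordinates, and temperature values are invented, and the class is assumed to be importable from this module:

import numpy as np

# Hypothetical 50 x 60 grid and 10 stations (all values made up for illustration).
GridX, GridY = np.meshgrid(np.linspace(0, 5000, 60), np.linspace(0, 4000, 50))
GridZ = 1500 + 0.1 * GridX                           # synthetic elevation surface
mx = np.random.uniform(0, 5000, 10)
my = np.random.uniform(0, 4000, 10)
mz = 1500 + 0.1 * mx
air_temp = 15 - 0.0065 * mz + np.random.randn(10)    # lapse-rate-like station data

idw = IDW(mx, my, GridX, GridY, mz=mz, GridZ=GridZ, power=2)
plain = idw.calculateIDW(air_temp)                   # standard IDW, shape (50, 60)
trended = idw.detrendedIDW(air_temp, flag=-1)        # only a negative elevation trend allowed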
webapp/lesson/forms.py | Ilyaivanov60/web_dictionary | 0 | 6631778 | from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
class lessonForm(FlaskForm):
create = SubmitField('Создать', render_kw={"class": "btn btn-primary"})
| from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
class lessonForm(FlaskForm):
create = SubmitField('Создать', render_kw={"class": "btn btn-primary"})
| none | 1 | 2.235399 | 2 |
|
kerbal_api/cfg_parser/constants.py | obi1kenobi/kerbal-api | 3 | 6631779 | import re
# Localization tags of the form "#autoLOC_123456" are generally followed by a comment
# that may include the same localization tag and an "=" sign, followed by the English text.
# This pattern matches localized values, extracting:
# - the tag value, under group name "tag"
# - the English text, under group name "english"
localization_pattern = re.compile(r"(?P<tag>#autoLOC_\d+)\s+//(?:\s*\1\s*=)?(?P<english>.*)")
comment_sequence = "//"
| import re
# Localization tags of the form "#autoLOC_123456" are generally followed by a comment
# that may include the same localization tag and an "=" sign, followed by the English text.
# This pattern matches localized values, extracting:
# - the tag value, under group name "tag"
# - the English text, under group name "english"
localization_pattern = re.compile(r"(?P<tag>#autoLOC_\d+)\s+//(?:\s*\1\s*=)?(?P<english>.*)")
comment_sequence = "//"
| en | 0.742654 | # Localization tags of the form "#autoLOC_123456" are generally followed by a comment # that may include the same localization tag and an "=" sign, followed by the English text. # This pattern matches localized values, extracting: # - the tag value, under group name "tag" # - the English text, under group name "english" #autoLOC_\d+)\s+//(?:\s*\1\s*=)?(?P<english>.*)") | 3.042198 | 3 |
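A short illustration of what the pattern above captures; the sample cfg line is invented:

import re

localization_pattern = re.compile(
    r"(?P<tag>#autoLOC_\d+)\s+//(?:\s*\1\s*=)?(?P<english>.*)")

line = "title = #autoLOC_500893 //#autoLOC_500893 = Mk1 Command Pod"   # invented example
match = localization_pattern.search(line)
if match is not None:
    print(match.group("tag"))                # '#autoLOC_500893'
    print(match.group("english").strip())    # 'Mk1 Command Pod'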
Code/scripts/notebook_utils.py | madHatter106/Bayesian_TOA_ML | 0 | 6631780 | import numpy as np
import matplotlib.pyplot as pl
import pandas as pd
from seaborn import heatmap
from cmocean import cm as cmo
def plot_pca_res(pca_machine, threshold=0.85, alpha=1,
num_pca_disp=None, ax=None):
"""Plot PCA results."""
if ax is None:
_, ax = pl.subplots(figsize=(12, 10))
cum_expl_var = np.cumsum(pca_machine.explained_variance_ratio_)
if num_pca_disp is None:
num_pca_disp = np.argmax(cum_expl_var > 0.999) + 1
ax.bar(range(1, num_pca_disp+1),
pca_machine.explained_variance_ratio_[:num_pca_disp],
align='center', color='skyblue',
label='PC explained_variance')
ax.step(range(1, num_pca_disp+1),
np.cumsum(pca_machine.explained_variance_ratio_[:num_pca_disp]),
where='mid',
label='cumulated variance')
ax.hlines(threshold, 0, num_pca_disp+2, linestyles='--', linewidth=2,
label='selection cutoff: %.2f' % threshold)
ax.set_xticks(np.arange(1, num_pca_disp+1))
ax.set_xticklabels(['PC%d' % i for i in range(1, num_pca_disp+1)],
rotation=45)
ax.set_xlim((0.5, 0.5+num_pca_disp))
ax.set_ylim((0, 1))
ax.set_title('PCA Explained Variance')
ax.legend(loc='center right')
def plot_cross_corr(df_pca, df, ax=None, **heatmap_kws):
dfrrs_w_pca = pd.merge(df_pca, df, 'outer',
left_index=True,
right_index=True)
corr_w_pca = dfrrs_w_pca.corr().T
corr_w_pca.drop(df_pca.columns, axis=0, inplace=True)
corr_w_pca.drop(df.columns, axis=1, inplace=True)
if ax is None:
_, ax = pl.subplots(figsize=(20, 5))
heatmap(corr_w_pca, cmap=cmo.balance, annot=True,
vmin=-1, vmax=1, ax=ax, **heatmap_kws);
def fit_plotter(x, y, fit_fn=None, transform=None, noise=None):
line_kwargs = {'color': 'blue'}
dot_kwargs = {'alpha': 0.5}
if fit_fn:
if transform is None:
transform = lambda j: j
x_data = np.atleast_2d(np.linspace(x.min(), x.max(), num=500)).T
y_fit = fit_fn(transform(x_data))
if y_fit.ndim == 2 and y_fit.shape[1] > 1:
n_lines = y_fit.shape[1]
if n_lines > 100:
indices = np.linspace(0, n_lines - 1, num=100, dtype=int)
y_fit = y_fit[:, indices]
n_lines = len(indices)
line_kwargs['alpha'] = 0.3
line_kwargs['linewidth'] = 2
x_data = np.repeat(np.atleast_2d(x_data), n_lines, axis=1)
pl.plot(x_data, y_fit, '-', **line_kwargs)
if noise is not None:
noise = noise[indices]
noise_kwargs = line_kwargs.copy()
noise_kwargs['color'] = 'steelblue'
noise_kwargs['linewidth'] = line_kwargs['linewidth'] * 0.5
for const in (-2, -1, 1, 2):
noise_kwargs['alpha'] = 0.5 * line_kwargs['alpha'] / abs(const)
pl.plot(x_data, y_fit + const * noise, '-', **noise_kwargs)
pl.plot(x, y, 'ro', **dot_kwargs)
| import numpy as np
import matplotlib.pyplot as pl
import pandas as pd
from seaborn import heatmap
from cmocean import cm as cmo
def plot_pca_res(pca_machine, threshold=0.85, alpha=1,
num_pca_disp=None, ax=None):
"""Plot PCA results."""
if ax is None:
_, ax = pl.subplots(figsize=(12, 10))
cum_expl_var = np.cumsum(pca_machine.explained_variance_ratio_)
if num_pca_disp is None:
num_pca_disp = np.argmax(cum_expl_var > 0.999) + 1
ax.bar(range(1, num_pca_disp+1),
pca_machine.explained_variance_ratio_[:num_pca_disp],
align='center', color='skyblue',
label='PC explained_variance')
ax.step(range(1, num_pca_disp+1),
np.cumsum(pca_machine.explained_variance_ratio_[:num_pca_disp]),
where='mid',
label='cumulated variance')
ax.hlines(threshold, 0, num_pca_disp+2, linestyles='--', linewidth=2,
label='selection cutoff: %.2f' % threshold)
ax.set_xticks(np.arange(1, num_pca_disp+1))
ax.set_xticklabels(['PC%d' % i for i in range(1, num_pca_disp+1)],
rotation=45)
ax.set_xlim((0.5, 0.5+num_pca_disp))
ax.set_ylim((0, 1))
ax.set_title('PCA Explained Variance')
ax.legend(loc='center right')
def plot_cross_corr(df_pca, df, ax=None, **heatmap_kws):
dfrrs_w_pca = pd.merge(df_pca, df, 'outer',
left_index=True,
right_index=True)
corr_w_pca = dfrrs_w_pca.corr().T
corr_w_pca.drop(df_pca.columns, axis=0, inplace=True)
corr_w_pca.drop(df.columns, axis=1, inplace=True)
if ax is None:
_, ax = pl.subplots(figsize=(20, 5))
heatmap(corr_w_pca, cmap=cmo.balance, annot=True,
vmin=-1, vmax=1, ax=ax, **heatmap_kws);
def fit_plotter(x, y, fit_fn=None, transform=None, noise=None):
line_kwargs = {'color': 'blue'}
dot_kwargs = {'alpha': 0.5}
if fit_fn:
if transform is None:
transform = lambda j: j
x_data = np.atleast_2d(np.linspace(x.min(), x.max(), num=500)).T
y_fit = fit_fn(transform(x_data))
if y_fit.ndim == 2 and y_fit.shape[1] > 1:
n_lines = y_fit.shape[1]
if n_lines > 100:
indices = np.linspace(0, n_lines - 1, num=100, dtype=int)
y_fit = y_fit[:, indices]
n_lines = len(indices)
line_kwargs['alpha'] = 0.3
line_kwargs['linewidth'] = 2
x_data = np.repeat(np.atleast_2d(x_data), n_lines, axis=1)
pl.plot(x_data, y_fit, '-', **line_kwargs)
if noise is not None:
noise = noise[indices]
noise_kwargs = line_kwargs.copy()
noise_kwargs['color'] = 'steelblue'
noise_kwargs['linewidth'] = line_kwargs['linewidth'] * 0.5
for const in (-2, -1, 1, 2):
noise_kwargs['alpha'] = 0.5 * line_kwargs['alpha'] / abs(const)
pl.plot(x_data, y_fit + const * noise, '-', **noise_kwargs)
pl.plot(x, y, 'ro', **dot_kwargs)
| en | 0.796139 | Plot PCA results. | 2.768547 | 3 |
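A hypothetical way to drive the plotting helpers above; it assumes scikit-learn is available and uses random data purely to show the call pattern:

import numpy as np
import pandas as pd
from sklearn.decomposition import PCA

X = pd.DataFrame(np.random.randn(200, 12))          # made-up feature matrix
pca = PCA().fit(X)
plot_pca_res(pca, threshold=0.85)                    # scree bars plus cumulative variance

scores = pd.DataFrame(pca.transform(X)[:, :3], columns=['PC1', 'PC2', 'PC3'])
plot_cross_corr(scores, X)                           # PC-vs-feature correlation heatmap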
codigo_das_aulas/aula_06/aula_06_4.py | VeirichR/curso-python-selenium | 234 | 6631781 | from selenium.webdriver import Firefox
b = Firefox()
url = 'http://selenium.dunossauro.live/aula_06.html'
b.get(url)
| from selenium.webdriver import Firefox
b = Firefox()
url = 'http://selenium.dunossauro.live/aula_06.html'
b.get(url)
| none | 1 | 2.00657 | 2 |
|
apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/ortools/sat/python/cp_model_helper.py | tharindu1st/apim-migration-resources | 0 | 6631782 | <filename>apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/ortools/sat/python/cp_model_helper.py<gh_stars>0
# Copyright 2010-2018 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""helpers methods for the cp_model module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
INT_MIN = -9223372036854775808 # hardcoded to be platform independent.
INT_MAX = 9223372036854775807
INT32_MIN = -2147483648
INT32_MAX = 2147483647
def AssertIsInt64(x):
"""Asserts that x is integer and x is in [min_int_64, max_int_64]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT_MIN or x > INT_MAX:
raise OverflowError('Does not fit in an int64: %s' % x)
def AssertIsInt32(x):
"""Asserts that x is integer and x is in [min_int_32, max_int_32]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT32_MIN or x > INT32_MAX:
raise OverflowError('Does not fit in an int32: %s' % x)
def AssertIsBoolean(x):
"""Asserts that x is 0 or 1."""
if not isinstance(x, numbers.Integral) or x < 0 or x > 1:
raise TypeError('Not a boolean: %s' % x)
def CapInt64(v):
"""Restrict v within [INT_MIN..INT_MAX] range."""
if v > INT_MAX:
return INT_MAX
if v < INT_MIN:
return INT_MIN
return v
def CapSub(x, y):
"""Saturated arithmetics. Returns x - y truncated to the int64 range."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not integral: ' + str(x))
if not isinstance(y, numbers.Integral):
raise TypeError('Not integral: ' + str(y))
AssertIsInt64(x)
AssertIsInt64(y)
if y == 0:
return x
if x == y:
if x == INT_MAX or x == INT_MIN:
raise OverflowError(
'Integer NaN: subtracting INT_MAX or INT_MIN to itself')
return 0
if x == INT_MAX or x == INT_MIN:
return x
if y == INT_MAX:
return INT_MIN
if y == INT_MIN:
return INT_MAX
return CapInt64(x - y)
| <filename>apim-migration-testing-tool/Python/venv/lib/python3.6/site-packages/ortools/sat/python/cp_model_helper.py<gh_stars>0
# Copyright 2010-2018 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""helpers methods for the cp_model module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numbers
INT_MIN = -9223372036854775808 # hardcoded to be platform independent.
INT_MAX = 9223372036854775807
INT32_MIN = -2147483648
INT32_MAX = 2147483647
def AssertIsInt64(x):
"""Asserts that x is integer and x is in [min_int_64, max_int_64]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT_MIN or x > INT_MAX:
raise OverflowError('Does not fit in an int64: %s' % x)
def AssertIsInt32(x):
"""Asserts that x is integer and x is in [min_int_32, max_int_32]."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not an integer: %s' % x)
if x < INT32_MIN or x > INT32_MAX:
raise OverflowError('Does not fit in an int32: %s' % x)
def AssertIsBoolean(x):
"""Asserts that x is 0 or 1."""
if not isinstance(x, numbers.Integral) or x < 0 or x > 1:
raise TypeError('Not a boolean: %s' % x)
def CapInt64(v):
"""Restrict v within [INT_MIN..INT_MAX] range."""
if v > INT_MAX:
return INT_MAX
if v < INT_MIN:
return INT_MIN
return v
def CapSub(x, y):
"""Saturated arithmetics. Returns x - y truncated to the int64 range."""
if not isinstance(x, numbers.Integral):
raise TypeError('Not integral: ' + str(x))
if not isinstance(y, numbers.Integral):
raise TypeError('Not integral: ' + str(y))
AssertIsInt64(x)
AssertIsInt64(y)
if y == 0:
return x
if x == y:
if x == INT_MAX or x == INT_MIN:
raise OverflowError(
'Integer NaN: subtracting INT_MAX or INT_MIN to itself')
return 0
if x == INT_MAX or x == INT_MIN:
return x
if y == INT_MAX:
return INT_MIN
if y == INT_MIN:
return INT_MAX
return CapInt64(x - y)
| en | 0.843315 | # Copyright 2010-2018 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. helpers methods for the cp_model module. # hardcoded to be platform independent. Asserts that x is integer and x is in [min_int_64, max_int_64]. Asserts that x is integer and x is in [min_int_32, max_int_32]. Asserts that x is 0 or 1. Restrict v within [INT_MIN..INT_MAX] range. Saturated arithmetics. Returns x - y truncated to the int64 range. | 2.102106 | 2 |
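A few illustrative values for the saturated subtraction above; the import path simply mirrors the file location shown and is an assumption:

from ortools.sat.python.cp_model_helper import CapSub, INT_MAX, INT_MIN  # assumed path

assert CapSub(5, 3) == 2                  # ordinary subtraction
assert CapSub(INT_MAX, -1) == INT_MAX     # would overflow, so it saturates high
assert CapSub(INT_MIN, 1) == INT_MIN      # saturates low
assert CapSub(7, INT_MIN) == INT_MAX      # subtracting INT_MIN caps at INT_MAX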
software/glasgow/applet/interface/spi_controller/__init__.py | electroniceel/Glasgow | 1,014 | 6631783 | <reponame>electroniceel/Glasgow
import struct
import logging
import asyncio
import math
from nmigen.compat import *
from nmigen.compat.genlib.cdc import *
from ....support.logging import *
from ....gateware.clockgen import *
from ... import *
class SPIControllerBus(Module):
def __init__(self, pads, sck_idle, sck_edge, cs_active):
self.oe = Signal(reset=1)
self.sck = Signal(reset=sck_idle)
self.cs = Signal(reset=not cs_active)
self.copi = Signal()
self.cipo = Signal()
self.comb += [
pads.sck_t.oe.eq(self.oe),
pads.sck_t.o.eq(self.sck),
]
if hasattr(pads, "cs_t"):
self.comb += [
pads.cs_t.oe.eq(1),
pads.cs_t.o.eq(self.cs),
]
if hasattr(pads, "copi_t"):
self.comb += [
pads.copi_t.oe.eq(self.oe),
pads.copi_t.o.eq(self.copi)
]
if hasattr(pads, "cipo_t"):
self.specials += \
MultiReg(pads.cipo_t.i, self.cipo)
sck_r = Signal()
self.sync += sck_r.eq(self.sck)
self.setup = Signal()
self.latch = Signal()
if sck_edge in ("r", "rising"):
self.comb += [
self.setup.eq( sck_r & ~self.sck),
self.latch.eq(~sck_r & self.sck),
]
elif sck_edge in ("f", "falling"):
self.comb += [
self.setup.eq(~sck_r & self.sck),
self.latch.eq( sck_r & ~self.sck),
]
else:
assert False
CMD_MASK = 0b11110000
CMD_SHIFT = 0b00000000
CMD_DELAY = 0b00010000
CMD_SYNC = 0b00100000
# CMD_SHIFT
BIT_DATA_OUT = 0b0001
BIT_DATA_IN = 0b0010
BIT_HOLD_SS = 0b0100
class SPIControllerSubtarget(Module):
def __init__(self, pads, out_fifo, in_fifo, period_cyc, delay_cyc,
sck_idle, sck_edge, cs_active):
self.submodules.bus = SPIControllerBus(pads, sck_idle, sck_edge, cs_active)
###
self.submodules.clkgen = ResetInserter()(ClockGen(period_cyc))
timer = Signal(max=delay_cyc)
timer_en = Signal()
self.sync += [
If(timer != 0,
timer.eq(timer - 1)
).Elif(timer_en,
timer.eq(delay_cyc - 1)
)
]
shreg_o = Signal(8)
shreg_i = Signal(8)
self.comb += [
self.bus.sck.eq(self.clkgen.clk),
self.bus.copi.eq(shreg_o[-1]),
]
self.sync += [
If(self.bus.setup,
shreg_o.eq(Cat(C(0, 1), shreg_o))
).Elif(self.bus.latch,
shreg_i.eq(Cat(self.bus.cipo, shreg_i))
)
]
cmd = Signal(8)
count = Signal(16)
bitno = Signal(max=8 + 1)
self.submodules.fsm = FSM(reset_state="RECV-COMMAND")
self.fsm.act("RECV-COMMAND",
in_fifo.flush.eq(1),
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(cmd, out_fifo.dout),
If((out_fifo.dout & CMD_MASK) == CMD_SYNC,
NextState("SYNC")
).Else(
NextState("RECV-COUNT-1")
)
)
)
self.fsm.act("SYNC",
If(in_fifo.writable,
in_fifo.we.eq(1),
in_fifo.din.eq(0),
NextState("RECV-COMMAND")
)
)
self.fsm.act("RECV-COUNT-1",
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(count[0:8], out_fifo.dout),
NextState("RECV-COUNT-2")
)
)
self.fsm.act("RECV-COUNT-2",
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(count[8:16], out_fifo.dout),
If((cmd & CMD_MASK) == CMD_DELAY,
NextState("DELAY")
).Else(
NextState("COUNT-CHECK")
)
)
)
self.fsm.act("DELAY",
If(timer == 0,
If(count == 0,
NextState("RECV-COMMAND")
).Else(
NextValue(count, count - 1),
timer_en.eq(1)
)
)
)
self.fsm.act("COUNT-CHECK",
If(count == 0,
NextState("RECV-COMMAND"),
If((cmd & BIT_HOLD_SS) != 0,
NextValue(self.bus.cs, cs_active),
),
).Else(
NextValue(self.bus.cs, cs_active),
NextState("RECV-DATA")
)
)
self.fsm.act("RECV-DATA",
If((cmd & BIT_DATA_OUT) != 0,
out_fifo.re.eq(1),
NextValue(shreg_o, out_fifo.dout),
).Else(
NextValue(shreg_o, 0)
),
If(((cmd & BIT_DATA_IN) != 0) | out_fifo.readable,
NextValue(count, count - 1),
NextValue(bitno, 8),
NextState("TRANSFER")
)
)
self.comb += self.clkgen.reset.eq(~self.fsm.ongoing("TRANSFER")),
self.fsm.act("TRANSFER",
If(self.clkgen.stb_r,
NextValue(bitno, bitno - 1)
).Elif(self.clkgen.stb_f,
If(bitno == 0,
NextState("SEND-DATA")
),
)
)
self.fsm.act("SEND-DATA",
If((cmd & BIT_DATA_IN) != 0,
in_fifo.din.eq(shreg_i),
in_fifo.we.eq(1),
),
If(((cmd & BIT_DATA_OUT) != 0) | in_fifo.writable,
If(count == 0,
If((cmd & BIT_HOLD_SS) == 0,
NextValue(self.bus.cs, not cs_active),
),
NextState("RECV-COMMAND")
).Else(
NextState("RECV-DATA")
)
)
)
class SPIControllerInterface:
def __init__(self, interface, logger):
self.lower = interface
self._logger = logger
self._level = logging.DEBUG if self._logger.name == __name__ else logging.TRACE
def _log(self, message, *args):
self._logger.log(self._level, "SPI: " + message, *args)
async def reset(self):
self._log("reset")
await self.lower.reset()
@staticmethod
def _chunk_count(count, hold_ss, chunk_size=0xffff):
while count > chunk_size:
yield chunk_size, True
count -= chunk_size
yield count, hold_ss
@staticmethod
def _chunk_bytes(bytes, hold_ss, chunk_size=0xffff):
offset = 0
while len(bytes) - offset > chunk_size:
yield bytes[offset:offset + chunk_size], True
offset += chunk_size
yield bytes[offset:], hold_ss
async def transfer(self, data, hold_ss=False):
try:
out_data = memoryview(data)
except TypeError:
out_data = memoryview(bytes(data))
self._log("xfer-out=<%s>", dump_hex(out_data))
in_data = []
for out_data, hold_ss in self._chunk_bytes(out_data, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_IN|BIT_DATA_OUT|(BIT_HOLD_SS if hold_ss else 0),
len(out_data)))
await self.lower.write(out_data)
in_data.append(await self.lower.read(len(out_data)))
in_data = b"".join(in_data)
self._log("xfer-in=<%s>", dump_hex(in_data))
return in_data
async def read(self, count, hold_ss=False):
in_data = []
for count, hold_ss in self._chunk_count(count, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_IN|(BIT_HOLD_SS if hold_ss else 0),
count))
in_data.append(await self.lower.read(count))
in_data = b"".join(in_data)
self._log("read-in=<%s>", dump_hex(in_data))
return in_data
async def write(self, data, hold_ss=False):
try:
out_data = memoryview(data)
except TypeError:
out_data = memoryview(bytes(data))
self._log("write-out=<%s>", dump_hex(out_data))
for out_data, hold_ss in self._chunk_bytes(out_data, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_OUT|(BIT_HOLD_SS if hold_ss else 0),
len(out_data)))
await self.lower.write(out_data)
async def delay_us(self, delay):
self._log("delay=%d us", delay)
while delay > 0xffff:
await self.lower.write(struct.pack("<BH", CMD_DELAY, 0xffff))
delay -= 0xffff
await self.lower.write(struct.pack("<BH", CMD_DELAY, delay))
async def delay_ms(self, delay):
await self.delay_us(delay * 1000)
async def synchronize(self):
self._log("sync")
await self.lower.write([CMD_SYNC])
await self.lower.read(1)
class SPIControllerApplet(GlasgowApplet, name="spi-controller"):
logger = logging.getLogger(__name__)
help = "initiate SPI transactions"
description = """
Initiate transactions on the SPI bus.
"""
__pins = ("sck", "cs", "copi", "cipo")
@classmethod
def add_build_arguments(cls, parser, access, omit_pins=False):
super().add_build_arguments(parser, access)
if not omit_pins:
access.add_pin_argument(parser, "sck", required=True)
access.add_pin_argument(parser, "cs")
access.add_pin_argument(parser, "copi")
access.add_pin_argument(parser, "cipo")
parser.add_argument(
"-f", "--frequency", metavar="FREQ", type=int, default=100,
help="set SPI clock frequency to FREQ kHz (default: %(default)s)")
parser.add_argument(
"--sck-idle", metavar="LEVEL", type=int, choices=[0, 1], default=0,
help="set idle clock level to LEVEL (default: %(default)s)")
parser.add_argument(
"--sck-edge", metavar="EDGE", type=str, choices=["r", "rising", "f", "falling"],
default="rising",
help="latch data at clock edge EDGE (default: %(default)s)")
parser.add_argument(
"--cs-active", metavar="LEVEL", type=int, choices=[0, 1], default=0,
help="set active chip select level to LEVEL (default: %(default)s)")
def build(self, target, args, pins=__pins):
self.mux_interface = iface = target.multiplexer.claim_interface(self, args)
return iface.add_subtarget(SPIControllerSubtarget(
pads=iface.get_pads(args, pins=pins),
out_fifo=iface.get_out_fifo(),
in_fifo=iface.get_in_fifo(auto_flush=False),
period_cyc=self.derive_clock(input_hz=target.sys_clk_freq,
output_hz=args.frequency * 1000,
clock_name="sck",
# 2 cyc MultiReg delay from SCK to CIPO requires a 4 cyc
# period with current implementation of SERDES
min_cyc=4),
delay_cyc=self.derive_clock(input_hz=target.sys_clk_freq,
output_hz=1e6,
clock_name="delay"),
sck_idle=args.sck_idle,
sck_edge=args.sck_edge,
cs_active=args.cs_active,
))
async def run(self, device, args):
iface = await device.demultiplexer.claim_interface(self, self.mux_interface, args)
spi_iface = SPIControllerInterface(iface, self.logger)
return spi_iface
@classmethod
def add_interact_arguments(cls, parser):
def hex(arg): return bytes.fromhex(arg)
parser.add_argument(
"data", metavar="DATA", type=hex,
help="hex bytes to transfer to the device")
async def interact(self, device, args, spi_iface):
data = await spi_iface.transfer(args.data)
print(data.hex())
# -------------------------------------------------------------------------------------------------
class SPIControllerAppletTestCase(GlasgowAppletTestCase, applet=SPIControllerApplet):
@synthesis_test
def test_build(self):
self.assertBuilds(args=["--pin-sck", "0", "--pin-cs", "1",
"--pin-copi", "2", "--pin-cipo", "3"])
def setup_loopback(self):
self.build_simulated_applet()
mux_iface = self.applet.mux_interface
mux_iface.comb += mux_iface.pads.cipo_t.i.eq(mux_iface.pads.copi_t.o)
@applet_simulation_test("setup_loopback",
["--pin-sck", "0", "--pin-cs", "1",
"--pin-copi", "2", "--pin-cipo", "3",
"--frequency", "5000"])
@asyncio.coroutine
def test_loopback(self):
mux_iface = self.applet.mux_interface
spi_iface = yield from self.run_simulated_applet()
self.assertEqual((yield mux_iface.pads.cs_t.o), 1)
result = yield from spi_iface.transfer([0xAA, 0x55, 0x12, 0x34])
self.assertEqual(result, bytearray([0xAA, 0x55, 0x12, 0x34]))
self.assertEqual((yield mux_iface.pads.cs_t.o), 1)
| import struct
import logging
import asyncio
import math
from nmigen.compat import *
from nmigen.compat.genlib.cdc import *
from ....support.logging import *
from ....gateware.clockgen import *
from ... import *
class SPIControllerBus(Module):
def __init__(self, pads, sck_idle, sck_edge, cs_active):
self.oe = Signal(reset=1)
self.sck = Signal(reset=sck_idle)
self.cs = Signal(reset=not cs_active)
self.copi = Signal()
self.cipo = Signal()
self.comb += [
pads.sck_t.oe.eq(self.oe),
pads.sck_t.o.eq(self.sck),
]
if hasattr(pads, "cs_t"):
self.comb += [
pads.cs_t.oe.eq(1),
pads.cs_t.o.eq(self.cs),
]
if hasattr(pads, "copi_t"):
self.comb += [
pads.copi_t.oe.eq(self.oe),
pads.copi_t.o.eq(self.copi)
]
if hasattr(pads, "cipo_t"):
self.specials += \
MultiReg(pads.cipo_t.i, self.cipo)
sck_r = Signal()
self.sync += sck_r.eq(self.sck)
self.setup = Signal()
self.latch = Signal()
if sck_edge in ("r", "rising"):
self.comb += [
self.setup.eq( sck_r & ~self.sck),
self.latch.eq(~sck_r & self.sck),
]
elif sck_edge in ("f", "falling"):
self.comb += [
self.setup.eq(~sck_r & self.sck),
self.latch.eq( sck_r & ~self.sck),
]
else:
assert False
CMD_MASK = 0b11110000
CMD_SHIFT = 0b00000000
CMD_DELAY = 0b00010000
CMD_SYNC = 0b00100000
# CMD_SHIFT
BIT_DATA_OUT = 0b0001
BIT_DATA_IN = 0b0010
BIT_HOLD_SS = 0b0100
class SPIControllerSubtarget(Module):
def __init__(self, pads, out_fifo, in_fifo, period_cyc, delay_cyc,
sck_idle, sck_edge, cs_active):
self.submodules.bus = SPIControllerBus(pads, sck_idle, sck_edge, cs_active)
###
self.submodules.clkgen = ResetInserter()(ClockGen(period_cyc))
timer = Signal(max=delay_cyc)
timer_en = Signal()
self.sync += [
If(timer != 0,
timer.eq(timer - 1)
).Elif(timer_en,
timer.eq(delay_cyc - 1)
)
]
shreg_o = Signal(8)
shreg_i = Signal(8)
self.comb += [
self.bus.sck.eq(self.clkgen.clk),
self.bus.copi.eq(shreg_o[-1]),
]
self.sync += [
If(self.bus.setup,
shreg_o.eq(Cat(C(0, 1), shreg_o))
).Elif(self.bus.latch,
shreg_i.eq(Cat(self.bus.cipo, shreg_i))
)
]
cmd = Signal(8)
count = Signal(16)
bitno = Signal(max=8 + 1)
self.submodules.fsm = FSM(reset_state="RECV-COMMAND")
self.fsm.act("RECV-COMMAND",
in_fifo.flush.eq(1),
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(cmd, out_fifo.dout),
If((out_fifo.dout & CMD_MASK) == CMD_SYNC,
NextState("SYNC")
).Else(
NextState("RECV-COUNT-1")
)
)
)
self.fsm.act("SYNC",
If(in_fifo.writable,
in_fifo.we.eq(1),
in_fifo.din.eq(0),
NextState("RECV-COMMAND")
)
)
self.fsm.act("RECV-COUNT-1",
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(count[0:8], out_fifo.dout),
NextState("RECV-COUNT-2")
)
)
self.fsm.act("RECV-COUNT-2",
If(out_fifo.readable,
out_fifo.re.eq(1),
NextValue(count[8:16], out_fifo.dout),
If((cmd & CMD_MASK) == CMD_DELAY,
NextState("DELAY")
).Else(
NextState("COUNT-CHECK")
)
)
)
self.fsm.act("DELAY",
If(timer == 0,
If(count == 0,
NextState("RECV-COMMAND")
).Else(
NextValue(count, count - 1),
timer_en.eq(1)
)
)
)
self.fsm.act("COUNT-CHECK",
If(count == 0,
NextState("RECV-COMMAND"),
If((cmd & BIT_HOLD_SS) != 0,
NextValue(self.bus.cs, cs_active),
),
).Else(
NextValue(self.bus.cs, cs_active),
NextState("RECV-DATA")
)
)
self.fsm.act("RECV-DATA",
If((cmd & BIT_DATA_OUT) != 0,
out_fifo.re.eq(1),
NextValue(shreg_o, out_fifo.dout),
).Else(
NextValue(shreg_o, 0)
),
If(((cmd & BIT_DATA_IN) != 0) | out_fifo.readable,
NextValue(count, count - 1),
NextValue(bitno, 8),
NextState("TRANSFER")
)
)
self.comb += self.clkgen.reset.eq(~self.fsm.ongoing("TRANSFER")),
self.fsm.act("TRANSFER",
If(self.clkgen.stb_r,
NextValue(bitno, bitno - 1)
).Elif(self.clkgen.stb_f,
If(bitno == 0,
NextState("SEND-DATA")
),
)
)
self.fsm.act("SEND-DATA",
If((cmd & BIT_DATA_IN) != 0,
in_fifo.din.eq(shreg_i),
in_fifo.we.eq(1),
),
If(((cmd & BIT_DATA_OUT) != 0) | in_fifo.writable,
If(count == 0,
If((cmd & BIT_HOLD_SS) == 0,
NextValue(self.bus.cs, not cs_active),
),
NextState("RECV-COMMAND")
).Else(
NextState("RECV-DATA")
)
)
)
class SPIControllerInterface:
def __init__(self, interface, logger):
self.lower = interface
self._logger = logger
self._level = logging.DEBUG if self._logger.name == __name__ else logging.TRACE
def _log(self, message, *args):
self._logger.log(self._level, "SPI: " + message, *args)
async def reset(self):
self._log("reset")
await self.lower.reset()
@staticmethod
def _chunk_count(count, hold_ss, chunk_size=0xffff):
while count > chunk_size:
yield chunk_size, True
count -= chunk_size
yield count, hold_ss
@staticmethod
def _chunk_bytes(bytes, hold_ss, chunk_size=0xffff):
offset = 0
while len(bytes) - offset > chunk_size:
yield bytes[offset:offset + chunk_size], True
offset += chunk_size
yield bytes[offset:], hold_ss
async def transfer(self, data, hold_ss=False):
try:
out_data = memoryview(data)
except TypeError:
out_data = memoryview(bytes(data))
self._log("xfer-out=<%s>", dump_hex(out_data))
in_data = []
for out_data, hold_ss in self._chunk_bytes(out_data, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_IN|BIT_DATA_OUT|(BIT_HOLD_SS if hold_ss else 0),
len(out_data)))
await self.lower.write(out_data)
in_data.append(await self.lower.read(len(out_data)))
in_data = b"".join(in_data)
self._log("xfer-in=<%s>", dump_hex(in_data))
return in_data
async def read(self, count, hold_ss=False):
in_data = []
for count, hold_ss in self._chunk_count(count, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_IN|(BIT_HOLD_SS if hold_ss else 0),
count))
in_data.append(await self.lower.read(count))
in_data = b"".join(in_data)
self._log("read-in=<%s>", dump_hex(in_data))
return in_data
async def write(self, data, hold_ss=False):
try:
out_data = memoryview(data)
except TypeError:
out_data = memoryview(bytes(data))
self._log("write-out=<%s>", dump_hex(out_data))
for out_data, hold_ss in self._chunk_bytes(out_data, hold_ss):
await self.lower.write(struct.pack("<BH",
CMD_SHIFT|BIT_DATA_OUT|(BIT_HOLD_SS if hold_ss else 0),
len(out_data)))
await self.lower.write(out_data)
async def delay_us(self, delay):
self._log("delay=%d us", delay)
while delay > 0xffff:
await self.lower.write(struct.pack("<BH", CMD_DELAY, 0xffff))
delay -= 0xffff
await self.lower.write(struct.pack("<BH", CMD_DELAY, delay))
async def delay_ms(self, delay):
await self.delay_us(delay * 1000)
async def synchronize(self):
self._log("sync")
await self.lower.write([CMD_SYNC])
await self.lower.read(1)
class SPIControllerApplet(GlasgowApplet, name="spi-controller"):
logger = logging.getLogger(__name__)
help = "initiate SPI transactions"
description = """
Initiate transactions on the SPI bus.
"""
__pins = ("sck", "cs", "copi", "cipo")
@classmethod
def add_build_arguments(cls, parser, access, omit_pins=False):
super().add_build_arguments(parser, access)
if not omit_pins:
access.add_pin_argument(parser, "sck", required=True)
access.add_pin_argument(parser, "cs")
access.add_pin_argument(parser, "copi")
access.add_pin_argument(parser, "cipo")
parser.add_argument(
"-f", "--frequency", metavar="FREQ", type=int, default=100,
help="set SPI clock frequency to FREQ kHz (default: %(default)s)")
parser.add_argument(
"--sck-idle", metavar="LEVEL", type=int, choices=[0, 1], default=0,
help="set idle clock level to LEVEL (default: %(default)s)")
parser.add_argument(
"--sck-edge", metavar="EDGE", type=str, choices=["r", "rising", "f", "falling"],
default="rising",
help="latch data at clock edge EDGE (default: %(default)s)")
parser.add_argument(
"--cs-active", metavar="LEVEL", type=int, choices=[0, 1], default=0,
help="set active chip select level to LEVEL (default: %(default)s)")
def build(self, target, args, pins=__pins):
self.mux_interface = iface = target.multiplexer.claim_interface(self, args)
return iface.add_subtarget(SPIControllerSubtarget(
pads=iface.get_pads(args, pins=pins),
out_fifo=iface.get_out_fifo(),
in_fifo=iface.get_in_fifo(auto_flush=False),
period_cyc=self.derive_clock(input_hz=target.sys_clk_freq,
output_hz=args.frequency * 1000,
clock_name="sck",
# 2 cyc MultiReg delay from SCK to CIPO requires a 4 cyc
# period with current implementation of SERDES
min_cyc=4),
delay_cyc=self.derive_clock(input_hz=target.sys_clk_freq,
output_hz=1e6,
clock_name="delay"),
sck_idle=args.sck_idle,
sck_edge=args.sck_edge,
cs_active=args.cs_active,
))
async def run(self, device, args):
iface = await device.demultiplexer.claim_interface(self, self.mux_interface, args)
spi_iface = SPIControllerInterface(iface, self.logger)
return spi_iface
@classmethod
def add_interact_arguments(cls, parser):
def hex(arg): return bytes.fromhex(arg)
parser.add_argument(
"data", metavar="DATA", type=hex,
help="hex bytes to transfer to the device")
async def interact(self, device, args, spi_iface):
data = await spi_iface.transfer(args.data)
print(data.hex())
# -------------------------------------------------------------------------------------------------
class SPIControllerAppletTestCase(GlasgowAppletTestCase, applet=SPIControllerApplet):
@synthesis_test
def test_build(self):
self.assertBuilds(args=["--pin-sck", "0", "--pin-cs", "1",
"--pin-copi", "2", "--pin-cipo", "3"])
def setup_loopback(self):
self.build_simulated_applet()
mux_iface = self.applet.mux_interface
mux_iface.comb += mux_iface.pads.cipo_t.i.eq(mux_iface.pads.copi_t.o)
@applet_simulation_test("setup_loopback",
["--pin-sck", "0", "--pin-cs", "1",
"--pin-copi", "2", "--pin-cipo", "3",
"--frequency", "5000"])
@asyncio.coroutine
def test_loopback(self):
mux_iface = self.applet.mux_interface
spi_iface = yield from self.run_simulated_applet()
self.assertEqual((yield mux_iface.pads.cs_t.o), 1)
result = yield from spi_iface.transfer([0xAA, 0x55, 0x12, 0x34])
self.assertEqual(result, bytearray([0xAA, 0x55, 0x12, 0x34]))
self.assertEqual((yield mux_iface.pads.cs_t.o), 1) | en | 0.547555 | # CMD_SHIFT ### Initiate transactions on the SPI bus. # 2 cyc MultiReg delay from SCK to CIPO requires a 4 cyc # period with current implementation of SERDES # ------------------------------------------------------------------------------------------------- | 2.124036 | 2 |
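A rough sketch of how a host-side request is framed for the out-FIFO protocol above: a command byte, a little-endian 16-bit count, then the payload. The constants are restated so the snippet stands alone, and the chunking at 0xffff bytes done by the real transfer() is omitted:

import struct

CMD_SHIFT    = 0b00000000
BIT_DATA_OUT = 0b0001
BIT_DATA_IN  = 0b0010
BIT_HOLD_SS  = 0b0100

def frame_transfer(payload: bytes, hold_ss: bool = False) -> bytes:
    # Mirrors SPIControllerInterface.transfer(): full-duplex shift of len(payload) bytes.
    cmd = CMD_SHIFT | BIT_DATA_IN | BIT_DATA_OUT | (BIT_HOLD_SS if hold_ss else 0)
    return struct.pack("<BH", cmd, len(payload)) + payload

assert frame_transfer(b"\xaa\x55") == b"\x03\x02\x00\xaa\x55"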
qnarre/base/doc.py | quantapix/qnarre.com | 0 | 6631784 | # Copyright 2019 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import datetime as dt
from ..rectify import rectifier
from .author import Author
from .named import Named, Saved
class Genre(Named):
pass
class Doc(Saved, Named):
suff = '.txt'
pages = None
def __init__(self, genre=None, author=None, title=None, pages=None, **kw):
super().__init__(**kw)
if genre:
self.genre = Genre.create(name=genre)
if author:
self.author = author
if title:
self.title = title
if pages:
self.pages = pages
@property
def factor(self):
return self.genre.factor * super().factor
@property
def bias(self):
return self.genre.bias + super().bias
@property
def date(self):
s = self.name.split('/')[2]
s = '-'.join(s.split('-')[:3])
return dt.datetime.strptime(s, '%y-%m-%d').date()
@property
def props(self):
return {
'name': self.name,
'genre': self.genre.name,
'author': self.author,
'title': self.title,
}
@property
def fields(self):
s = '{}.pdf'.format(self.name)
fs = {'Date': self.date, 'Title': self.title, 'Source': s}
fs.update(Author.create(name=self.author).fields)
fs.update({'Type': self.tag, 'Genre': self.genre.name})
return fs
def from_text(self, txt, **_):
txt = tuple(rectifier(txt))
self.title = txt[0]
txt = '\n'.join(txt[2:])
self.pages = gs = []
for g in txt.split('\n\n\n'):
rs = []
for r in g.split('\n\n'):
rs.append(r.splitlines())
gs.append(rs)
def to_text(self, **_):
txt = [self.title, '']
for rs in self.pages:
for ls in rs:
txt.extend(ls)
txt.append('')
txt.append('')
return '\n'.join(txt).strip()
| # Copyright 2019 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import datetime as dt
from ..rectify import rectifier
from .author import Author
from .named import Named, Saved
class Genre(Named):
pass
class Doc(Saved, Named):
suff = '.txt'
pages = None
def __init__(self, genre=None, author=None, title=None, pages=None, **kw):
super().__init__(**kw)
if genre:
self.genre = Genre.create(name=genre)
if author:
self.author = author
if title:
self.title = title
if pages:
self.pages = pages
@property
def factor(self):
return self.genre.factor * super().factor
@property
def bias(self):
return self.genre.bias + super().bias
@property
def date(self):
s = self.name.split('/')[2]
s = '-'.join(s.split('-')[:3])
return dt.datetime.strptime(s, '%y-%m-%d').date()
@property
def props(self):
return {
'name': self.name,
'genre': self.genre.name,
'author': self.author,
'title': self.title,
}
@property
def fields(self):
s = '{}.pdf'.format(self.name)
fs = {'Date': self.date, 'Title': self.title, 'Source': s}
fs.update(Author.create(name=self.author).fields)
fs.update({'Type': self.tag, 'Genre': self.genre.name})
return fs
def from_text(self, txt, **_):
txt = tuple(rectifier(txt))
self.title = txt[0]
txt = '\n'.join(txt[2:])
self.pages = gs = []
for g in txt.split('\n\n\n'):
rs = []
for r in g.split('\n\n'):
rs.append(r.splitlines())
gs.append(rs)
def to_text(self, **_):
txt = [self.title, '']
for rs in self.pages:
for ls in rs:
txt.extend(ls)
txt.append('')
txt.append('')
return '\n'.join(txt).strip()
| en | 0.813792 | # Copyright 2019 Quantapix Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= | 2.392159 | 2 |
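The page and paragraph grouping performed by from_text above, shown on a tiny invented string with the rectifier step omitted:

txt = "Title\n\nline a\nline b\n\nline c\n\n\nline d"
body = "\n".join(txt.splitlines()[2:])
pages = [[para.splitlines() for para in page.split("\n\n")]
         for page in body.split("\n\n\n")]
assert pages == [[["line a", "line b"], ["line c"]], [["line d"]]]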
src/toil_lib/test/__init__.py | BD2KGenomics/toil-lib | 4 | 6631785 | # Copyright (C) 2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import unittest
import os
from toil.job import Job
class DockerCallTest(unittest.TestCase):
"""
This class handles creating a tmpdir and Toil options suitable for a unittest.
"""
def setUp(self):
super(DockerCallTest, self).setUp()
# the test tmpdir needs to be in the home directory so files written onto mounted
# directories from a Docker container will be visible on the host
# https://docs.docker.com/docker-for-mac/osxfs/
home = os.path.expanduser("~") + '/'
self.tmpdir = tempfile.mkdtemp(prefix=home)
self.options = Job.Runner.getDefaultOptions(os.path.join(str(self.tmpdir), 'jobstore'))
self.options.clean = 'always'
def tearDown(self):
# delete temp
super(DockerCallTest, self).tearDown()
for file in os.listdir(self.tmpdir):
os.remove(os.path.join(self.tmpdir, file))
os.removedirs(self.tmpdir)
# this is lifted from toil.test; perhaps refactor into bpl?
try:
# noinspection PyUnresolvedReferences
from _pytest.mark import MarkDecorator
except ImportError:
# noinspection PyUnusedLocal
def _mark_test(name, test_item):
return test_item
else:
def _mark_test(name, test_item):
return MarkDecorator(name)(test_item)
def needs_spark(test_item):
"""
Use as a decorator before test classes or methods to only run them if Spark is usable.
"""
test_item = _mark_test('spark', test_item)
try:
# noinspection PyUnresolvedReferences
import pyspark
except ImportError:
return unittest.skip("Skipping test. Install PySpark to include this test.")(test_item)
except:
raise
else:
return test_item
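# Illustrative usage (an assumption, not part of the original module): the two
# helpers above are meant to be combined on a concrete test case, e.g.
#
#     from toil_lib.test import DockerCallTest, needs_spark
#
#     @needs_spark
#     class SparkSmokeTest(DockerCallTest):
#         def test_tmpdir_is_created(self):
#             self.assertTrue(os.path.isdir(self.tmpdir))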
graphvalues.py | ContinuumBridge/scripts | 0 | 6631786 |
#!/usr/bin/env python
# checkeew.py
# Copyright (C) ContinuumBridge Limited, 2013-14 - All Rights Reserved
# Unauthorized copying of this file, via any medium is strictly prohibited
# Proprietary and confidential
# Written by <NAME>
#
SENSORS = ['temperature','ir_temperature', 'rel_humidity']
# Include the Dropbox SDK
from dropbox.client import DropboxClient, DropboxOAuth2Flow, DropboxOAuth2FlowNoRedirect
from dropbox.rest import ErrorResponse, RESTSocketError
from dropbox.datastore import DatastoreError, DatastoreManager, Date, Bytes
from pprint import pprint
import time
import os, sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
class CheckEEW():
def __init__(self, argv):
if len(argv) < 2:
print "Usage: checkbridge <bridge>"
exit()
else:
self.bridges = [argv[1]]
        self.bridges = [b.lower() for b in self.bridges]
print "Checking ", self.bridges
access_token = os.getenv('CB_DROPBOX_TOKEN', 'NO_TOKEN')
if access_token == "NO_TOKEN":
print "No Dropbox access token. You must set CB_DROPBOX_TOKEN environment variable first."
exit()
try:
self.client = DropboxClient(access_token)
except:
print "Could not access Dropbox. Wrong access token?"
exit()
self.manager = DatastoreManager(self.client)
self.process()
def niceTime(self, timeStamp):
localtime = time.localtime(timeStamp)
milliseconds = '%03d' % int((timeStamp - int(timeStamp)) * 1000)
now = time.strftime('%Y:%m:%d, %H:%M:%S:', localtime) + milliseconds
return now
def matrix_to_string(self,matrix, header=None):
"""
Return a pretty, aligned string representation of a nxm matrix.
This representation can be used to print any tabular data, such as
database results. It works by scanning the lengths of each element
in each column, and determining the format string dynamically.
@param matrix: Matrix representation (list with n rows of m elements).
@param header: Optional tuple or list with header elements to be displayed.
"""
if type(header) is list:
header = tuple(header)
lengths = []
if header:
for column in header:
lengths.append(len(column))
for row in matrix:
for column in row:
i = row.index(column)
column = str(column)
cl = len(column)
try:
ml = lengths[i]
if cl > ml:
lengths[i] = cl
except IndexError:
lengths.append(cl)
lengths = tuple(lengths)
format_string = ""
for length in lengths:
format_string += "%-" + str(length) + "s "
format_string += "\n"
matrix_str = ""
if header:
matrix_str += format_string % header
for row in matrix:
matrix_str += format_string % tuple(row)
return matrix_str
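    # Illustrative example (added, not part of the original script):
    #     self.matrix_to_string([['a', 1], ['bb', 22]], header=('col1', 'col2'))
    # returns an aligned table along the lines of:
    #     col1 col2
    #     a    1
    #     bb   22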
def process(self):
for bridge in self.bridges:
print "Reaading and processing data for ", bridge
fileName = bridge + ".csv"
self.f = open(fileName, "w", 0)
rows = []
ds = self.manager.open_or_create_datastore(bridge)
t = ds.get_table('config')
devices = t.query(type='idtoname')
values = []
commas = ""
heads = ""
devSensors = []
for d in devices:
devHandle = d.get('device')
devName = d.get('name')
self.f.write(devHandle + ',' + devName + '\n')
t = ds.get_table(devHandle)
for sensor in SENSORS:
heads = heads + devName + ' ' + sensor + ','
devSensors.append([devName, sensor])
readings = t.query(Type=sensor)
max = 0
for r in readings:
timeStamp = float(r.get('Date'))
if timeStamp > max:
max = timeStamp
dat = r.get('Data')
line = commas + str("%2.1f" %dat)
values.append([timeStamp, line])
commas += ","
rows.append([devHandle, devName, sensor, self.niceTime(max)])
values.sort(key=lambda tup: tup[0])
print "Type the numbers of the values you want to plot, separated by spaces:"
for d in devSensors:
print devSensors.index(d) + 1, ":", d[0], d[1]
request = raw_input("Values > ")
toProcess = request.split()
print "toProcess: ", toProcess
#self.f.write(heads + '\n')
#for v in values:
#line = self.niceTime(v[0]) + "," + v[1] + "\n"
#self.f.write(line)
#header = ('Handle', 'Friendly Name', 'Sensor', 'Most Recent Sample')
#txt = self.matrix_to_string(rows, header)
#print txt
if __name__ == '__main__':
c = CheckEEW(sys.argv)
trebelge/XMLFileProcessStrategy/XMLFileProcessStrategy.py | askmetoo/trebelge | 0 | 6631787 | <filename>trebelge/XMLFileProcessStrategy/XMLFileProcessStrategy.py
from abc import ABC, abstractmethod
from trebelge.XMLFileProcessStrategy import XMLFileProcessStrategyContext
class XMLFileProcessStrategy(ABC):
"""
The Strategy interface declares operations common to all supported versions
of some algorithm.
The Context uses this interface to call the algorithm defined by Concrete
Strategies.
"""
@abstractmethod
def return_xml_file_data(self, context: XMLFileProcessStrategyContext):
pass
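# Illustrative sketch (added; not part of the original package): a minimal
# concrete strategy. A real implementation would read the XML file referenced
# by the context; the class name and empty return value are assumptions.
class ExampleXMLFileProcessStrategy(XMLFileProcessStrategy):
    def return_xml_file_data(self, context: XMLFileProcessStrategyContext):
        # Parse the file held by the context here and return its fields.
        return {}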
flash/image/detection/finetuning.py | tszumowski/lightning-flash | 0 | 6631788 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytorch_lightning as pl
from flash.core.finetuning import FlashBaseFinetuning
class ObjectDetectionFineTuning(FlashBaseFinetuning):
"""Freezes the backbone during Detector training."""
def __init__(self, train_bn: bool = True) -> None:
super().__init__(train_bn=train_bn)
def freeze_before_training(self, pl_module: pl.LightningModule) -> None:
model = pl_module.model
self.freeze(modules=model.backbone, train_bn=self.train_bn)
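# Illustrative usage (added, an assumption rather than project documentation):
# since the class is a Lightning callback, it can be handed to a Trainer; the
# exact flash finetuning entry point may differ between versions.
if __name__ == "__main__":
    trainer = pl.Trainer(max_epochs=1, callbacks=[ObjectDetectionFineTuning(train_bn=False)])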
src/skmultiflow/trees/gaussian_estimator.py | AndreFCruz/scikit-multiflow | 1 | 6631789 | import math
from skmultiflow.utils.statistics import normal_probability
class GaussianEstimator(object):
""" GaussianEstimator
    Gaussian incremental estimator that uses an incremental update method that is more resistant to floating point imprecision.
    For more info, see <NAME>'s "The Art of Computer Programming, Volume 2: Seminumerical Algorithms", section 4.2.2.
"""
def __init__(self):
self._weight_sum = 0.0
self._mean = 0.0
self._variance_sum = 0.0
self._NORMAL_CONSTANT = math.sqrt(2 * math.pi)
def add_observation(self, value, weight):
""" add_observation
Adds a new observation and updates statistics
Parameters
----------
value: The value
weight: The weight of the instance
Returns
-------
self
"""
if value is None or math.isinf(value):
return
if self._weight_sum > 0.0:
self._weight_sum += weight
last_mean = self._mean
self._mean += weight * (value - last_mean) / self._weight_sum
self._variance_sum += weight * (value - last_mean) * (value - self._mean)
else:
self._mean = value
self._weight_sum = weight
def get_total_weight_observed(self):
return self._weight_sum
def get_mean(self):
return self._mean
def get_std_dev(self):
return math.sqrt(self.get_variance())
def get_variance(self):
return self._variance_sum / (self._weight_sum - 1.0) if self._weight_sum > 1.0 else 0.0
def probability_density(self, value):
""" probability_density
Calculates the normal distribution
Parameters
----------
value: The value
Returns
-------
Probability density (normal distribution)
"""
if self._weight_sum > 0.0:
std_dev = self.get_std_dev()
mean = self.get_mean()
if std_dev > 0.0:
diff = value - mean
return (1.0 / (self._NORMAL_CONSTANT * std_dev)) * math.exp(-(diff * diff / (2.0 * std_dev * std_dev)))
if value == mean:
return 1.0
return 0.0
def estimated_weight_lessthan_equalto_greaterthan_value(self, value):
equalto_weight = self.probability_density(value) * self._weight_sum
std_dev = self.get_std_dev()
mean = self.get_mean()
if std_dev > 0.0:
lessthan_weight = normal_probability((value - mean) / std_dev) * self._weight_sum - equalto_weight
else:
if value < mean:
lessthan_weight = self._weight_sum - equalto_weight
else:
lessthan_weight = 0.0
greaterthan_weight = self._weight_sum - equalto_weight - lessthan_weight
if greaterthan_weight < 0.0:
greaterthan_weight = 0.0
return [lessthan_weight, equalto_weight, greaterthan_weight]
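if __name__ == "__main__":
    # Small illustrative demo (added, not part of the original module).
    est = GaussianEstimator()
    for v in [1.0, 2.0, 3.0, 4.0]:
        est.add_observation(v, 1.0)
    print(est.get_mean())                    # 2.5
    print(est.get_variance())                # ~1.667 (sample variance)
    print(est.probability_density(2.5))      # density at the fitted mean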
websocks/utils.py | abersheeran/websocks | 91 | 6631790 | import asyncio
import os
import threading
from asyncio import AbstractEventLoop, Task, Future
from typing import Tuple, Dict, Any, Set, Optional, Coroutine
class Singleton(type):
def __init__(
cls,
name: str,
bases: Tuple[type],
namespace: Dict[str, Any],
) -> None:
cls.instance = None
super().__init__(name, bases, namespace)
def __call__(cls, *args, **kwargs) -> Any:
if cls.instance is None:
cls.instance = super().__call__(*args, **kwargs)
return cls.instance
def onlyfirst(*coros: Coroutine, loop: Optional[AbstractEventLoop] = None) -> Future:
"""
Execute multiple coroutines concurrently, returning only the results of the first execution.
When one is completed, the execution of other coroutines will be canceled.
"""
loop = loop or asyncio.get_running_loop()
tasks: Set[Task] = set()
result, _future = loop.create_future(), None
def _done_callback(fut: Future) -> None:
nonlocal result, _future
if result.cancelled():
return # nothing to do on onlyfirst cancelled
if _future is None:
_future = fut # record first completed future
cancel_all_task()
if not result.done():
if _future.exception() is None:
result.set_result(_future.result())
else:
result.set_exception(_future.exception())
def cancel_all_task() -> None:
for task in tasks:
task.remove_done_callback(_done_callback)
for task in filter(lambda task: not task.done(), tasks):
task.cancel()
for coro in coros:
task: Task = loop.create_task(coro)
task.add_done_callback(_done_callback)
tasks.add(task)
result.add_done_callback(lambda fut: cancel_all_task())
return result
class State(dict):
"""
An object that can be used to store arbitrary state.
"""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.sync_lock = threading.Lock()
self.async_lock = asyncio.Lock()
def __enter__(self):
self.sync_lock.acquire()
return self
def __exit__(self, exc_type, value, traceback):
self.sync_lock.release()
async def __aenter__(self):
await self.async_lock.acquire()
return self
async def __aexit__(self, exc_type, value, traceback):
self.async_lock.release()
def __setattr__(self, name: Any, value: Any) -> None:
self[name] = value
def __getattr__(self, name: Any) -> Any:
try:
return self[name]
except KeyError:
message = "'{}' object has no attribute '{}'"
raise AttributeError(message.format(self.__class__.__name__, name))
def __delattr__(self, name: Any) -> None:
del self[name]
if os.name == "nt":
import winreg
def set_proxy(enable: bool, proxy: str) -> None:
"""
        Set the system-wide network proxy (via the Windows registry).
"""
key = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
"Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings",
0,
winreg.KEY_WRITE,
)
winreg.SetValueEx(key, "ProxyEnable", 0, winreg.REG_DWORD, int(enable))
winreg.SetValueEx(key, "ProxyServer", 0, winreg.REG_SZ, proxy)
winreg.CloseKey(key)
def get_proxy() -> Tuple[bool, str]:
"""
        Get the system-wide network proxy settings (from the Windows registry).
"""
key = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
"Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings",
0,
winreg.KEY_READ,
)
try:
return (
bool(winreg.QueryValueEx(key, "ProxyEnable")[0]),
winreg.QueryValueEx(key, "ProxyServer")[0],
)
except FileNotFoundError:
return False, ""
finally:
winreg.CloseKey(key)
else:
def set_proxy(enable: bool, proxy: str) -> None:
pass
def get_proxy() -> Tuple[bool, str]:
return True, ""
Calibration/HcalAlCaRecoProducers/test/AlCaHBHEMuonProducerFilter_cfg.py | malbouis/cmssw | 852 | 6631791 | import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Run2_2018_cff import Run2_2018
process = cms.Process("AlCaHBHEMuon",Run2_2018)
process.load('Configuration.StandardSequences.Services_cff')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load('Configuration.StandardSequences.AlCaRecoStreams_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
from Configuration.AlCa.autoCond import autoCond
process.GlobalTag.globaltag=autoCond['run2_data']
process.load("RecoLocalCalo.EcalRecAlgos.EcalSeverityLevelESProducer_cfi")
if hasattr(process,'MessageLogger'):
process.MessageLogger.HBHEMuon=dict()
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
'file:/eos/cms/store/group/dpg_hcal/comm_hcal/AmanKaur/HLTPhysics/RawtoReco_HLTPhysics/210910_053130/0000/RECO_RAW2DIGI_L1Reco_RECO_ALCA_1.root',
'file:/eos/cms/store/group/dpg_hcal/comm_hcal/AmanKaur/HLTPhysics/RawtoReco_HLTPhysics/210910_053130/0000/RECO_RAW2DIGI_L1Reco_RECO_ALCA_2.root',
)
)
process.ALCARECOStreamHcalCalHBHEMuon = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('pathALCARECOHcalCalHBHEMuonProducerFilter')
),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('ALCARECO'),
filterName = cms.untracked.string('ALCARECOHcalCalHBHEMuonProducerFilter')
),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
outputCommands = process.OutALCARECOHcalCalHBHEMuonProducerFilter.outputCommands,
fileName = cms.untracked.string('OutputHBHEMuonProducerFilter.root'),
)
process.alcaHcalHBHEMuonProducer.triggers = []
# Path and EndPath definitions
process.endjob_step = cms.EndPath(process.endOfProcess)
process.ALCARECOStreamHcalCalHBHEMuonProducerFilterOutPath = cms.EndPath(process.ALCARECOStreamHcalCalHBHEMuon)
# Schedule definition
process.schedule = cms.Schedule(process.pathALCARECOHcalCalHBHEMuonProducerFilter,process.endjob_step,process.ALCARECOStreamHcalCalHBHEMuonProducerFilterOutPath)
geo-analytical-app/app.py | ckurze/mongodb-hivemq-iot-demo | 2 | 6631792 | <filename>geo-analytical-app/app.py
# Nice introduction to maps in plotly: https://medium.com/analytics-vidhya/introduction-to-interactive-geoplots-with-plotly-and-mapbox-9249889358eb
import os
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
import plotly.graph_objects as go
import pandas as pd
import pymongo
import json
import paho.mqtt.client as mqtt
MQTT_HOST = os.environ['MQTT_HOST'] if 'MQTT_HOST' in os.environ else None
if MQTT_HOST == None:
raise ValueError('No MQTT Broker provided. Will exit.')
exit(-1)
MONGO_URI = os.environ['MONGO_URI'] if 'MONGO_URI' in os.environ else None
if MONGO_URI == None:
raise ValueError('No MongoDB Cluster provided. Will exit.')
exit(-1)
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css', dbc.themes.BOOTSTRAP]
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
server = app.server
mongo_client = pymongo.MongoClient(MONGO_URI)
db = mongo_client.geotruck
status_coll = db.status
warehouse_coll = db.warehouse
def get_warehouses():
return pd.DataFrame.from_records(warehouse_coll.aggregate([
{
'$project': {
'_id': 0,
'name': '$properties.name',
'street': '$properties.addr:street',
'zip': '$properties.addr:postcode',
'city': '$properties.addr:city',
'lon': {
'$arrayElemAt': [
'$geometry.coordinates', 0
]
},
'lat': {
'$arrayElemAt': [
'$geometry.coordinates', 1
]
}
}
}
]))
def get_distinct_trucks():
return list(status_coll.aggregate([
# Hint to leverage the index
{ '$sort': { 'truck': 1 } },
{ '$group': { '_id': '$truck' } }
]))
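# Note (an assumption, not stated in the original file): the "$sort before
# $group" hint above only pays off if an ascending index on "truck" exists,
# created once with something like: db.status.create_index([("truck", 1)])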
def get_truck_trace(truck = 'vehicles/trucks/truck-00001/location', route = None):
print('get_truck_trace')
print('Truck: ' + str(truck))
print('route: ' + str(route))
match = { '$match': {} }
if truck != None:
if isinstance(truck, list):
match['$match']['truck'] = { '$in': truck }
else:
match['$match']['truck'] = truck
if route != None:
if isinstance(route, list):
match['$match']['route'] = { '$in': route }
else:
match['$match']['route'] = route
df = pd.DataFrame.from_records(status_coll.aggregate([
match,
{
'$sort': {
'truck': 1,
'min_ts': 1
}
}, {
'$unwind': {
'path': '$m'
}
}, {
'$project': {
'_id': 0,
'truck': { '$substrCP': [ '$truck', 22, 5] },
'routeId': '$routeId',
'ts': '$m.ts',
'geo': '$m.geo',
'lon': {
'$arrayElemAt': [
'$m.geo.coordinates', 0
]
},
'lat': {
'$arrayElemAt': [
'$m.geo.coordinates', 1
]
},
'speed': '$m.speed',
'speedLimit': '$m.speedLimit',
'break': '$m.break'
}
}
]))
df.set_index(keys=['truck'], drop=False, inplace=True)
return df
def get_truck_routes(truck = ['vehicles/trucks/truck-00001/location']):
return list(status_coll.aggregate([
{ '$match': { 'truck': { '$in': truck } } },
{ '$sort': {
'truck': 1,
'routeId': 1,
'min_ts': 1
}},
{ '$unwind': { 'path': '$m' } },
{ '$group': {
'_id': {
'truck': '$truck',
'routeId': '$routeId'
},
'from': { '$first': '$m' },
'to': { '$last': '$m' },
'lineString': { '$push': '$m.geo.coordinates' }
}},
{ '$project': {
'_id': 0,
'truck': '$_id.truck',
'routeId': '$_id.routeId',
'min_ts': '$from.ts',
'max_ts': '$to.ts',
'lon_from': { '$arrayElemAt': [ '$from.geo.coordinates', 0 ] },
'lat_from': { '$arrayElemAt': [ '$from.geo.coordinates', 1 ] },
'lon_to': { '$arrayElemAt': [ '$to.geo.coordinates', 0 ] },
'lat_to': { '$arrayElemAt': [ '$to.geo.coordinates', 1 ] },
'geometry': {
'type': 'LineString',
'coordinates': '$lineString'
}
}},
{ '$sort': {
'truck': 1,
'min_ts': 1
}}
], allowDiskUse=True))
# Data for real-time map
current_truck_locations = {} #pd.DataFrame(columns=['truck', 'lat', 'lon', 'routeId', 'speed', 'speedLimit', 'break'])
#current_truck_locations.set_index(keys=['truck'], drop=False, inplace=True)
# The callback for when the client receives a CONNACK response from the MQTT server.
def on_connect(client, userdata, flags, rc):
# print('Connected to MQTT broker with result code ' + str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
# We want to subscribe to the status topic of all stations
client.subscribe('vehicles/trucks/#')
# The callback for when a PUBLISH message is received from the MQTT server.
# Optimization: As we receive many messages in one shot, the results should be processed in a batched manner.
def on_message(client, userdata, message):
global current_truck_locations
# print('Received message ' + str(message.payload) + ' on topic ' + message.topic + ' with QoS ' + str(message.qos))
payload = json.loads(message.payload)
truckId = message.topic[22:27]
# Did we mix up lon/lat here?
payload['lat'] = payload['location']['lon']
payload['lon'] = payload['location']['lat']
payload.pop('location')
payload['truck'] = truckId
current_truck_locations[truckId] = payload
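# A possible batched variant of on_message (a sketch under assumptions, not in
# the original code): buffer raw payloads here and let a periodic task fold
# them into current_truck_locations in one pass, e.g.
#
#     message_buffer = []
#
#     def on_message(client, userdata, message):
#         message_buffer.append((message.topic, json.loads(message.payload)))
#
#     def flush_buffer():
#         for topic, payload in message_buffer:
#             ...  # same per-message handling as above
#         message_buffer.clear()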
# Set up the MQTT broker connection
client = mqtt.Client(client_id='geo-subscriber-realtime-map')
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_HOST, 1883, 60)
warehouses = get_warehouses()
trucks = get_distinct_trucks()
current_truck_trace = get_truck_trace()
# The Map to visualize the routes
data = None # mapbox_chart_data()
layout = go.Layout(autosize=True,
mapbox= dict(zoom=5,
center= dict(lat=51.5368,
lon=10.5685),
style="open-street-map"),
width=750,
height=900)
fig = go.Figure(layout=layout, data=data)
# Real-Time Map
data_realtime = None
layout_realtime = layout
fig_realtime = go.Figure(layout=layout_realtime, data=data_realtime)
app.layout = html.Div([
html.H1(children='Fleet Manager - Analytics Dashboard', className='display-1 col-12 text-center'),
html.Div(className='container', children=[
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Real-time Map')
]),
html.Div(className='row', children=[
dcc.Graph(id='realtime-map', figure=fig_realtime, className='col-12 text-center')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', id='trucks-in-break-header', children='Trucks currently taking a break')
]),
html.Div(className='row', children=[
html.Div(id='trucks-in-break', children='')
]),
html.Div(className='row', children=[
html.Div(className='col-12 text-center', children='')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Choose individual Trucks')
]),
html.Div(className='row', children=[
html.Div(className='col-12', children=dcc.Dropdown(
id='truck-dropdown',
options=[ { 'label': truck['_id'][22:27], 'value': truck['_id'] } for truck in trucks ],
value=['vehicles/trucks/truck-00001/location'],
multi=True
))
]),
html.Div(className='row', children=[
dcc.Graph(id='main-map', figure=fig, className='col-12 text-center'),
]),
html.Div(className='row', children=[
html.Div(id='main-map-metadata', className='col-12', children='')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Route Information')
]),
html.Div(className='row', children=[
html.Div(id='truck-routes-analysis', className='col-12', children='')
]),
# Interval for real-time refresh of map
dcc.Interval(
id='realtime-refresh-interval',
interval=3000, # in milliseconds
n_intervals=0
)
]),
])
# Handle Changes of Truck Dropdown
@app.callback(
[Output('main-map', 'figure'),
Output('main-map-metadata', 'children'),
Output('truck-routes-analysis', 'children')],
[Input('truck-dropdown', 'value')])
def update_main_map(selection):
global current_truck_trace
current_truck_trace = get_truck_trace(truck=selection)
data = [
go.Scattermapbox(
name='Warehouses',
lat= warehouses['lat'] ,
lon= warehouses['lon'],
customdata = warehouses['city'],
mode='markers',
marker=dict(
size= 9,
color = 'gold',
opacity = .2,
),
)]
current_trucks = current_truck_trace['truck'].unique().tolist()
for t in current_trucks:
df_t = current_truck_trace.loc[current_truck_trace.truck==t]
df_t['text'] = '<b>Truck</b> ' + df_t['truck'] + '<br /><b>RouteID</b> ' + df_t['routeId'] + '<br /><b>Timestamp</b> ' + df_t['ts'].astype(str) + '<br /><b>Current Speed</b> ' + df_t['speed'].astype(str) + '<br /><b>Takes Break</b> ' + df_t['break'].astype(str)
data.append(
go.Scattermapbox(
name='Truck ' + t,
lat= df_t['lat'] ,
lon= df_t['lon'],
customdata = df_t['truck'],
text = df_t['text'],
mode='markers',
marker=dict(
size= 9,
opacity = .8,
)
))
current_truck_routes = get_truck_routes(truck=selection)
#print(current_truck_routes)
route_analysis_rows = []
for rt in current_truck_routes:
route_analysis_rows.append(
html.Tr([
html.Td(rt['truck']),
html.Td(rt['routeId']),
html.Td('(' + str(rt['lat_from']) + ',' + str(rt['lon_from']) + ')'),
html.Td('(' + str(rt['lat_to']) + ',' + str(rt['lon_to']) + ')'),
html.Td('(' + str(rt['min_ts'])),
html.Td('(' + str(rt['max_ts'])),
]))
route_analysis_rows.insert(0,
html.Tr([
html.Th('Truck'),
html.Th('Route ID'),
html.Th('From'),
html.Th('To'),
html.Th('Start Time'),
html.Th('Arrival Time')
]))
route_analysis_table = html.Table(children=route_analysis_rows)
# df_route_analysis = current_truck_trace.sort_values(by=['routeId','ts'])
# df_route_analysis['time_diff'] = df_route_analysis['ts'].diff()
# df_route_analysis.loc[df_route_analysis.routeId != df_route_analysis.routeId.shift(), 'time_diff'] = None
# print(df_route_analysis)
return go.Figure(layout=layout, data=data), 'Visualizing ' + str(len(current_truck_trace)) + ' data points.', route_analysis_table
# Realtime refresh of map every second
@app.callback(Output('realtime-map', 'figure'),
[Input('realtime-refresh-interval', 'n_intervals')])
def update_graph_live(n):
global current_truck_locations
df_locations = pd.DataFrame.from_records(list(current_truck_locations.values()))
if not 'truck' in df_locations.columns:
print('truck not in df')
return go.Figure(layout=layout_realtime, data=[])
df_locations.sort_values(by=['truck'], inplace=True)
df_locations['text'] = '<b>Truck</b> ' + df_locations['truck'] + '<br /><b>Current Speed</b> ' + df_locations['speed'].astype(str) + '<br /><b>Takes Break</b> ' + df_locations['break'].astype(str)
#print(df_locations)
data = []
current_trucks = df_locations['truck'].unique().tolist()
# print(current_trucks)
for t in current_trucks:
df_t = df_locations.loc[df_locations.truck==t]
data.append(
go.Scattermapbox(
name='Truck ' + t,
lat= df_t['lat'] ,
lon= df_t['lon'],
text = df_t['text'],
mode='markers',
marker=dict(
size= 9,
opacity = .8,
)
))
return go.Figure(layout=layout_realtime, data=data)
# Current Trucks taking a break
@app.callback([Output('trucks-in-break-header', 'children'),
Output('trucks-in-break', 'children')],
[Input('realtime-refresh-interval', 'n_intervals')])
def update_trucks_break(n):
global current_truck_locations
trucks_in_break = []
for cl in current_truck_locations.values():
if cl['break'] == True:
trucks_in_break.append(cl['truck'])
trucks_in_break.sort()
return str(len(trucks_in_break)) + ' Trucks currently taking a break', ', '.join(trucks_in_break)
if __name__ == '__main__':
client.loop_start()
app.run_server(debug=True)
| <filename>geo-analytical-app/app.py
# Nice introduction to maps in plotly: https://medium.com/analytics-vidhya/introduction-to-interactive-geoplots-with-plotly-and-mapbox-9249889358eb
import os
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
import plotly.graph_objects as go
import pandas as pd
import pymongo
import json
import paho.mqtt.client as mqtt
MQTT_HOST = os.environ['MQTT_HOST'] if 'MQTT_HOST' in os.environ else None
if MQTT_HOST == None:
raise ValueError('No MQTT Broker provided. Will exit.')
exit(-1)
MONGO_URI = os.environ['MONGO_URI'] if 'MONGO_URI' in os.environ else None
if MONGO_URI == None:
raise ValueError('No MongoDB Cluster provided. Will exit.')
exit(-1)
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css', dbc.themes.BOOTSTRAP]
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
server = app.server
mongo_client = pymongo.MongoClient(MONGO_URI)
db = mongo_client.geotruck
status_coll = db.status
warehouse_coll = db.warehouse
def get_warehouses():
return pd.DataFrame.from_records(warehouse_coll.aggregate([
{
'$project': {
'_id': 0,
'name': '$properties.name',
'street': '$properties.addr:street',
'zip': '$properties.addr:postcode',
'city': '$properties.addr:city',
'lon': {
'$arrayElemAt': [
'$geometry.coordinates', 0
]
},
'lat': {
'$arrayElemAt': [
'$geometry.coordinates', 1
]
}
}
}
]))
def get_distinct_trucks():
return list(status_coll.aggregate([
# Hint to leverage the index
{ '$sort': { 'truck': 1 } },
{ '$group': { '_id': '$truck' } }
]))
def get_truck_trace(truck = 'vehicles/trucks/truck-00001/location', route = None):
print('get_truck_trace')
print('Truck: ' + str(truck))
print('route: ' + str(route))
match = { '$match': {} }
if truck != None:
if isinstance(truck, list):
match['$match']['truck'] = { '$in': truck }
else:
match['$match']['truck'] = truck
if route != None:
if isinstance(route, list):
match['$match']['route'] = { '$in': route }
else:
match['$match']['route'] = route
df = pd.DataFrame.from_records(status_coll.aggregate([
match,
{
'$sort': {
'truck': 1,
'min_ts': 1
}
}, {
'$unwind': {
'path': '$m'
}
}, {
'$project': {
'_id': 0,
'truck': { '$substrCP': [ '$truck', 22, 5] },
'routeId': '$routeId',
'ts': '$m.ts',
'geo': '$m.geo',
'lon': {
'$arrayElemAt': [
'$m.geo.coordinates', 0
]
},
'lat': {
'$arrayElemAt': [
'$m.geo.coordinates', 1
]
},
'speed': '$m.speed',
'speedLimit': '$m.speedLimit',
'break': '$m.break'
}
}
]))
df.set_index(keys=['truck'], drop=False, inplace=True)
return df
def get_truck_routes(truck = ['vehicles/trucks/truck-00001/location']):
return list(status_coll.aggregate([
{ '$match': { 'truck': { '$in': truck } } },
{ '$sort': {
'truck': 1,
'routeId': 1,
'min_ts': 1
}},
{ '$unwind': { 'path': '$m' } },
{ '$group': {
'_id': {
'truck': '$truck',
'routeId': '$routeId'
},
'from': { '$first': '$m' },
'to': { '$last': '$m' },
'lineString': { '$push': '$m.geo.coordinates' }
}},
{ '$project': {
'_id': 0,
'truck': '$_id.truck',
'routeId': '$_id.routeId',
'min_ts': '$from.ts',
'max_ts': '$to.ts',
'lon_from': { '$arrayElemAt': [ '$from.geo.coordinates', 0 ] },
'lat_from': { '$arrayElemAt': [ '$from.geo.coordinates', 1 ] },
'lon_to': { '$arrayElemAt': [ '$to.geo.coordinates', 0 ] },
'lat_to': { '$arrayElemAt': [ '$to.geo.coordinates', 1 ] },
'geometry': {
'type': 'LineString',
'coordinates': '$lineString'
}
}},
{ '$sort': {
'truck': 1,
'min_ts': 1
}}
], allowDiskUse=True))
# Data for real-time map
current_truck_locations = {} #pd.DataFrame(columns=['truck', 'lat', 'lon', 'routeId', 'speed', 'speedLimit', 'break'])
#current_truck_locations.set_index(keys=['truck'], drop=False, inplace=True)
# The callback for when the client receives a CONNACK response from the MQTT server.
def on_connect(client, userdata, flags, rc):
# print('Connected to MQTT broker with result code ' + str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
# We want to subscribe to the status topic of all stations
client.subscribe('vehicles/trucks/#')
# The callback for when a PUBLISH message is received from the MQTT server.
# Optimization: As we receive many messages in one shot, the results should be processed in a batched manner.
def on_message(client, userdata, message):
global current_truck_locations
# print('Received message ' + str(message.payload) + ' on topic ' + message.topic + ' with QoS ' + str(message.qos))
payload = json.loads(message.payload)
truckId = message.topic[22:27]
# Did we mix up lon/lat here?
payload['lat'] = payload['location']['lon']
payload['lon'] = payload['location']['lat']
payload.pop('location')
payload['truck'] = truckId
current_truck_locations[truckId] = payload
# Setup MQTT broker connectionci
client = mqtt.Client(client_id='geo-subscriber-realtime-map')
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_HOST, 1883, 60)
warehouses = get_warehouses()
trucks = get_distinct_trucks()
current_truck_trace = get_truck_trace()
# The Map to visualize the routes
data = None # mapbox_chart_data()
layout = go.Layout(autosize=True,
mapbox= dict(zoom=5,
center= dict(lat=51.5368,
lon=10.5685),
style="open-street-map"),
width=750,
height=900)
fig = go.Figure(layout=layout, data=data)
# Real-Time Map
data_realtime = None
layout_realtime = layout
fig_realtime = go.Figure(layout=layout_realtime, data=data_realtime)
app.layout = html.Div([
html.H1(children='Fleet Manager - Analytics Dashboard', className='display-1 col-12 text-center'),
html.Div(className='container', children=[
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Real-time Map')
]),
html.Div(className='row', children=[
dcc.Graph(id='realtime-map', figure=fig_realtime, className='col-12 text-center')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', id='trucks-in-break-header', children='Trucks currently taking a break')
]),
html.Div(className='row', children=[
html.Div(id='trucks-in-break', children='')
]),
html.Div(className='row', children=[
html.Div(className='col-12 text-center', children='')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Choose individual Trucks')
]),
html.Div(className='row', children=[
html.Div(className='col-12', children=dcc.Dropdown(
id='truck-dropdown',
options=[ { 'label': truck['_id'][22:27], 'value': truck['_id'] } for truck in trucks ],
value=['vehicles/trucks/truck-00001/location'],
multi=True
))
]),
html.Div(className='row', children=[
dcc.Graph(id='main-map', figure=fig, className='col-12 text-center'),
]),
html.Div(className='row', children=[
html.Div(id='main-map-metadata', className='col-12', children='')
]),
html.Div(className='row', children=[
html.H3(className='col-12 text-center', children='Route Information')
]),
html.Div(className='row', children=[
html.Div(id='truck-routes-analysis', className='col-12', children='')
]),
# Interval for real-time refresh of map
dcc.Interval(
id='realtime-refresh-interval',
interval=3000, # in milliseconds
n_intervals=0
)
]),
])
# Handle Changes of Truck Dropdown
@app.callback(
[Output('main-map', 'figure'),
Output('main-map-metadata', 'children'),
Output('truck-routes-analysis', 'children')],
[Input('truck-dropdown', 'value')])
def update_main_map(selection):
global current_truck_trace
current_truck_trace = get_truck_trace(truck=selection)
data = [
go.Scattermapbox(
name='Warehouses',
lat= warehouses['lat'] ,
lon= warehouses['lon'],
customdata = warehouses['city'],
mode='markers',
marker=dict(
size= 9,
color = 'gold',
opacity = .2,
),
)]
current_trucks = current_truck_trace['truck'].unique().tolist()
for t in current_trucks:
df_t = current_truck_trace.loc[current_truck_trace.truck==t]
df_t['text'] = '<b>Truck</b> ' + df_t['truck'] + '<br /><b>RouteID</b> ' + df_t['routeId'] + '<br /><b>Timestamp</b> ' + df_t['ts'].astype(str) + '<br /><b>Current Speed</b> ' + df_t['speed'].astype(str) + '<br /><b>Takes Break</b> ' + df_t['break'].astype(str)
data.append(
go.Scattermapbox(
name='Truck ' + t,
lat= df_t['lat'] ,
lon= df_t['lon'],
customdata = df_t['truck'],
text = df_t['text'],
mode='markers',
marker=dict(
size= 9,
opacity = .8,
)
))
current_truck_routes = get_truck_routes(truck=selection)
#print(current_truck_routes)
route_analysis_rows = []
for rt in current_truck_routes:
route_analysis_rows.append(
html.Tr([
html.Td(rt['truck']),
html.Td(rt['routeId']),
html.Td('(' + str(rt['lat_from']) + ',' + str(rt['lon_from']) + ')'),
html.Td('(' + str(rt['lat_to']) + ',' + str(rt['lon_to']) + ')'),
html.Td('(' + str(rt['min_ts'])),
html.Td('(' + str(rt['max_ts'])),
]))
route_analysis_rows.insert(0,
html.Tr([
html.Th('Truck'),
html.Th('Route ID'),
html.Th('From'),
html.Th('To'),
html.Th('Start Time'),
html.Th('Arrival Time')
]))
route_analysis_table = html.Table(children=route_analysis_rows)
# df_route_analysis = current_truck_trace.sort_values(by=['routeId','ts'])
# df_route_analysis['time_diff'] = df_route_analysis['ts'].diff()
# df_route_analysis.loc[df_route_analysis.routeId != df_route_analysis.routeId.shift(), 'time_diff'] = None
# print(df_route_analysis)
return go.Figure(layout=layout, data=data), 'Visualizing ' + str(len(current_truck_trace)) + ' data points.', route_analysis_table
# Realtime refresh of map every second
@app.callback(Output('realtime-map', 'figure'),
[Input('realtime-refresh-interval', 'n_intervals')])
def update_graph_live(n):
global current_truck_locations
df_locations = pd.DataFrame.from_records(list(current_truck_locations.values()))
if not 'truck' in df_locations.columns:
print('truck not in df')
return go.Figure(layout=layout_realtime, data=[])
df_locations.sort_values(by=['truck'], inplace=True)
df_locations['text'] = '<b>Truck</b> ' + df_locations['truck'] + '<br /><b>Current Speed</b> ' + df_locations['speed'].astype(str) + '<br /><b>Takes Break</b> ' + df_locations['break'].astype(str)
#print(df_locations)
data = []
current_trucks = df_locations['truck'].unique().tolist()
# print(current_trucks)
for t in current_trucks:
df_t = df_locations.loc[df_locations.truck==t]
data.append(
go.Scattermapbox(
name='Truck ' + t,
lat= df_t['lat'] ,
lon= df_t['lon'],
text = df_t['text'],
mode='markers',
marker=dict(
size= 9,
opacity = .8,
)
))
return go.Figure(layout=layout_realtime, data=data)
# Current Trucks taking a break
@app.callback([Output('trucks-in-break-header', 'children'),
Output('trucks-in-break', 'children')],
[Input('realtime-refresh-interval', 'n_intervals')])
def update_trucks_break(n):
global current_truck_locations
trucks_in_break = []
for cl in current_truck_locations.values():
        if cl['break']:
trucks_in_break.append(cl['truck'])
trucks_in_break.sort()
return str(len(trucks_in_break)) + ' Trucks currently taking a break', ', '.join(trucks_in_break)
if __name__ == '__main__':
client.loop_start()
app.run_server(debug=True)
| en | 0.70264 | # Nice introduction to maps in plotly: https://medium.com/analytics-vidhya/introduction-to-interactive-geoplots-with-plotly-and-mapbox-9249889358eb # Hint to leverage the index # Data for real-time map #pd.DataFrame(columns=['truck', 'lat', 'lon', 'routeId', 'speed', 'speedLimit', 'break']) #current_truck_locations.set_index(keys=['truck'], drop=False, inplace=True) # The callback for when the client receives a CONNACK response from the MQTT server. # print('Connected to MQTT broker with result code ' + str(rc)) # Subscribing in on_connect() means that if we lose the connection and # reconnect then subscriptions will be renewed. # We want to subscribe to the status topic of all stations #') # The callback for when a PUBLISH message is received from the MQTT server. # Optimization: As we receive many messages in one shot, the results should be processed in a batched manner. # print('Received message ' + str(message.payload) + ' on topic ' + message.topic + ' with QoS ' + str(message.qos)) # Did we mix up lon/lat here? # Setup MQTT broker connectionci # The Map to visualize the routes # mapbox_chart_data() # Real-Time Map # Interval for real-time refresh of map # in milliseconds # Handle Changes of Truck Dropdown #print(current_truck_routes) # df_route_analysis = current_truck_trace.sort_values(by=['routeId','ts']) # df_route_analysis['time_diff'] = df_route_analysis['ts'].diff() # df_route_analysis.loc[df_route_analysis.routeId != df_route_analysis.routeId.shift(), 'time_diff'] = None # print(df_route_analysis) # Realtime refresh of map every second #print(df_locations) # print(current_trucks) # Current Trucks taking a break | 2.6509 | 3 |
src/homepage/views/index.py | cruzaria/DMRP | 0 | 6631793 | from django.conf import settings
from django_mako_plus import view_function, jscontext
from datetime import datetime, timedelta
@view_function
def process_request(request):
context = {
jscontext('now'): datetime.now(),
}
return request.dmp.render('index.html', context)
@view_function
def gettime(request):
context = {
'now': datetime.now(),
}
return request.dmp.render('index.gettime.html', context)
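# Routing note (django-mako-plus convention-based routing; worth double-checking
# against this project's urls.py): process_request above answers /homepage/index/
# and gettime answers /homepage/index.gettime/, i.e. /<app>/<page>.<function>/.
# The jscontext('now') key in process_request is what makes the timestamp
# available to the template's JavaScript context as well.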
# return CSV
#return request.dmp.render('my_csv.html', {}, content_type='text/csv')
# return a custom error page
#return request.dmp.render('custom_error_page.html', {}, status=404)
# specify a different template charset (or set globally in settings.py)
#return request.dmp.render('im_old.html', {}, content_type='cp1252')
#mystr = render_template(request, 'homepage', 'custom.html', context, subdir="customsubdir")
# from django_mako_plus import get_template
# template = get_template('homepage', 'index.html')
# def process_request(request, purchase:Purchase):
# the `purchase` variable has already been pulled from the database
#django way
#return render(request, 'homepage / index.html', context, using='django_mako_plus')
#if need Template
#return TemplateResponse(request, dmp_get_template('homepage', 'index.html'), context)
# replace 'homepage' with the name of any DMP-enabled app:
#return HttpResponse(render_template(request, 'homepage', 'index.html', context))
# replace 'homepage' with the name of any DMP-enabled app:
#return render(request, 'homepage/index.html', context)
# @view_function
# def process_request(request, hrs: int, mins: int, forward: bool=True):
# delta = timedelta(hours=hrs, minutes=mins)
# if forward:
# now = datetime.now() + delta
# else:
# now = datetime.now() - delta
# context = {
# 'now': now,
# }
# return request.dmp.render('index.html', context)
| from django.conf import settings
from django_mako_plus import view_function, jscontext
from datetime import datetime, timedelta
@view_function
def process_request(request):
context = {
jscontext('now'): datetime.now(),
}
return request.dmp.render('index.html', context)
@view_function
def gettime(request):
context = {
'now': datetime.now(),
}
return request.dmp.render('index.gettime.html', context)
# return CSV
#return request.dmp.render('my_csv.html', {}, content_type='text/csv')
# return a custom error page
#return request.dmp.render('custom_error_page.html', {}, status=404)
# specify a different template charset (or set globally in settings.py)
#return request.dmp.render('im_old.html', {}, content_type='cp1252')
#mystr = render_template(request, 'homepage', 'custom.html', context, subdir="customsubdir")
# from django_mako_plus import get_template
# template = get_template('homepage', 'index.html')
# def process_request(request, purchase:Purchase):
# the `purchase` variable has already been pulled from the database
#django way
#return render(request, 'homepage / index.html', context, using='django_mako_plus')
#if need Template
#return TemplateResponse(request, dmp_get_template('homepage', 'index.html'), context)
# replace 'homepage' with the name of any DMP-enabled app:
#return HttpResponse(render_template(request, 'homepage', 'index.html', context))
# replace 'homepage' with the name of any DMP-enabled app:
#return render(request, 'homepage/index.html', context)
# @view_function
# def process_request(request, hrs: int, mins: int, forward: bool=True):
# delta = timedelta(hours=hrs, minutes=mins)
# if forward:
# now = datetime.now() + delta
# else:
# now = datetime.now() - delta
# context = {
# 'now': now,
# }
# return request.dmp.render('index.html', context)
| en | 0.303878 | # return CSV #return request.dmp.render('my_csv.html', {}, content_type='text/csv') # return a custom error page #return request.dmp.render('custom_error_page.html', {}, status=404) # specify a different template charset (or set globally in settings.py) #return request.dmp.render('im_old.html', {}, content_type='cp1252') #mystr = render_template(request, 'homepage', 'custom.html', context, subdir="customsubdir") # from django_mako_plus import get_template # template = get_template('homepage', 'index.html') # def process_request(request, purchase:Purchase): # the `purchase` variable has already been pulled from the database #django way #return render(request, 'homepage / index.html', context, using='django_mako_plus') #if need Template #return TemplateResponse(request, dmp_get_template('homepage', 'index.html'), context) # replace 'homepage' with the name of any DMP-enabled app: #return HttpResponse(render_template(request, 'homepage', 'index.html', context)) # replace 'homepage' with the name of any DMP-enabled app: #return render(request, 'homepage/index.html', context) # @view_function # def process_request(request, hrs: int, mins: int, forward: bool=True): # delta = timedelta(hours=hrs, minutes=mins) # if forward: # now = datetime.now() + delta # else: # now = datetime.now() - delta # context = { # 'now': now, # } # return request.dmp.render('index.html', context) | 2.031145 | 2 |
flask_service/source_files/endpoint.py | Project-semyonov/adam | 0 | 6631794 |
from flask import Flask
from flask_restful import Api, Resource, reqparse
import time
import json
from datetime import datetime, timedelta
app = Flask(__name__)
api = Api(app)
epoch = time.time()
time_start = (datetime.now()-timedelta(hours=.5)).timestamp()
print(type(time_start))
time_end = datetime.now().timestamp()
print(type(time_end))
with open('temperature.json','r') as myfile:
temperatures = json.load(myfile)
# therm_list = [{x:y} for x in range(time_start, time_end)]
class Temp(Resource):
def get(self):
for item in temperatures:
time_string = list(item)[0]
print(time_string)
print(type(time_string))
if time_start < float(time_string) < time_end:
return temperatures, 200
return 'Nothing Here', 404
# def put(self, temp):
# pass
# def post(self, temp):
# pass
# def delete(self, temp):
# pass
#
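# Sketch of a filtered variant of Temp.get(): the handler above returns the
# whole temperatures list as soon as any single reading falls inside the
# half-hour window. The class below keeps only the readings in the window
# instead. Illustrative only -- it assumes, like the loop above, that each
# entry is a one-key dict of {"<epoch seconds>": reading}; the '/temp/recent'
# route in the trailing comment is hypothetical.
class TempFiltered(Resource):
    def get(self):
        in_window = [item for item in temperatures
                     if time_start < float(list(item)[0]) < time_end]
        if in_window:
            return in_window, 200
        return 'Nothing Here', 404
# api.add_resource(TempFiltered, '/temp/recent')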
api.add_resource(Temp, '/temp/all')
app.run(host='0.0.0.0', debug=True)
|
from flask import Flask
from flask_restful import Api, Resource, reqparse
import time
import json
from datetime import datetime, timedelta
app = Flask(__name__)
api = Api(app)
epoch = time.time()
time_start = (datetime.now()-timedelta(hours=.5)).timestamp()
print(type(time_start))
time_end = datetime.now().timestamp()
print(type(time_end))
with open('temperature.json','r') as myfile:
temperatures = json.load(myfile)
# therm_list = [{x:y} for x in range(time_start, time_end)]
class Temp(Resource):
def get(self):
for item in temperatures:
time_string = list(item)[0]
print(time_string)
print(type(time_string))
if time_start < float(time_string) < time_end:
return temperatures, 200
return 'Nothing Here', 404
# def put(self, temp):
# pass
# def post(self, temp):
# pass
# def delete(self, temp):
# pass
#
api.add_resource(Temp, '/temp/all')
app.run(host='0.0.0.0', debug=True)
| en | 0.614656 | # therm_list = [{x:y} for x in range(time_start, time_end)] # def put(self, temp): # pass # def post(self, temp): # pass # def delete(self, temp): # pass # | 2.940197 | 3 |
CIM14/CDPSM/Balanced/IEC61970/Core/Equipment.py | MaximeBaudette/PyCIM | 58 | 6631795 | # Copyright (C) 2010-2011 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CDPSM.Balanced.IEC61970.Core.PowerSystemResource import PowerSystemResource
class Equipment(PowerSystemResource):
"""The parts of a power system that are physical devices, electronic or mechanical
"""
def __init__(self, normaIlyInService=False, EquipmentContainer=None, *args, **kw_args):
"""Initialises a new 'Equipment' instance.
@param normaIlyInService: The equipment is normally in service.
@param EquipmentContainer: The association is used in the naming hierarchy.
"""
#: The equipment is normally in service.
self.normaIlyInService = normaIlyInService
self._EquipmentContainer = None
self.EquipmentContainer = EquipmentContainer
super(Equipment, self).__init__(*args, **kw_args)
_attrs = ["normaIlyInService"]
_attr_types = {"normaIlyInService": bool}
_defaults = {"normaIlyInService": False}
_enums = {}
_refs = ["EquipmentContainer"]
_many_refs = []
def getEquipmentContainer(self):
"""The association is used in the naming hierarchy.
"""
return self._EquipmentContainer
def setEquipmentContainer(self, value):
if self._EquipmentContainer is not None:
filtered = [x for x in self.EquipmentContainer.Equipments if x != self]
self._EquipmentContainer._Equipments = filtered
self._EquipmentContainer = value
if self._EquipmentContainer is not None:
if self not in self._EquipmentContainer._Equipments:
self._EquipmentContainer._Equipments.append(self)
EquipmentContainer = property(getEquipmentContainer, setEquipmentContainer)
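# Minimal sketch of how the bidirectional association above behaves: assigning
# EquipmentContainer registers the equipment in the container's Equipments
# collection, and re-assigning (or clearing) removes it from the old container.
# The container below is a stand-in object with just enough surface for the
# setter, not the real CIM EquipmentContainer class, and the snippet assumes
# Equipment can be constructed with the defaults of its base classes.
def _association_example():
    class _FakeContainer(object):
        def __init__(self):
            self._Equipments = []
        @property
        def Equipments(self):
            return self._Equipments
    bay = _FakeContainer()
    breaker = Equipment(normaIlyInService=True)
    breaker.EquipmentContainer = bay     # breaker now appears in bay.Equipments
    breaker.EquipmentContainer = None    # and is removed from it again
    return bay.Equipments                # -> []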
| # Copyright (C) 2010-2011 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CDPSM.Balanced.IEC61970.Core.PowerSystemResource import PowerSystemResource
class Equipment(PowerSystemResource):
"""The parts of a power system that are physical devices, electronic or mechanical
"""
def __init__(self, normaIlyInService=False, EquipmentContainer=None, *args, **kw_args):
"""Initialises a new 'Equipment' instance.
@param normaIlyInService: The equipment is normally in service.
@param EquipmentContainer: The association is used in the naming hierarchy.
"""
#: The equipment is normally in service.
self.normaIlyInService = normaIlyInService
self._EquipmentContainer = None
self.EquipmentContainer = EquipmentContainer
super(Equipment, self).__init__(*args, **kw_args)
_attrs = ["normaIlyInService"]
_attr_types = {"normaIlyInService": bool}
_defaults = {"normaIlyInService": False}
_enums = {}
_refs = ["EquipmentContainer"]
_many_refs = []
def getEquipmentContainer(self):
"""The association is used in the naming hierarchy.
"""
return self._EquipmentContainer
def setEquipmentContainer(self, value):
if self._EquipmentContainer is not None:
filtered = [x for x in self.EquipmentContainer.Equipments if x != self]
self._EquipmentContainer._Equipments = filtered
self._EquipmentContainer = value
if self._EquipmentContainer is not None:
if self not in self._EquipmentContainer._Equipments:
self._EquipmentContainer._Equipments.append(self)
EquipmentContainer = property(getEquipmentContainer, setEquipmentContainer)
| en | 0.802352 | # Copyright (C) 2010-2011 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. The parts of a power system that are physical devices, electronic or mechanical Initialises a new 'Equipment' instance. @param normaIlyInService: The equipment is normally in service. @param EquipmentContainer: The association is used in the naming hierarchy. #: The equipment is normally in service. The association is used in the naming hierarchy. | 1.66806 | 2 |
neutron/tests/functional/agent/l2/extensions/test_ovs_agent_qos_extension.py | EwaldvanGeffen/neutron | 1 | 6631796 | # Copyright (c) 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from neutron_lib import constants
from oslo_utils import uuidutils
import testscenarios
from neutron.api.rpc.callbacks.consumer import registry as consumer_reg
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.objects.qos import policy
from neutron.objects.qos import rule
from neutron.tests.common.agents import l2_extensions
from neutron.tests.functional.agent.l2 import base
load_tests = testscenarios.load_tests_apply_scenarios
TEST_POLICY_ID1 = "a2d72369-4246-4f19-bd3c-af51ec8d70cd"
TEST_POLICY_ID2 = "46ebaec0-0570-43ac-82f6-60d2b03168c5"
TEST_DSCP_MARK_1 = 14
TEST_DSCP_MARK_2 = 30
class OVSAgentQoSExtensionTestFramework(base.OVSAgentTestFramework):
test_dscp_marking_rule_1 = rule.QosDscpMarkingRule(
context=None,
qos_policy_id=TEST_POLICY_ID1,
id="9f126d84-551a-4dcf-bb01-0e9c0df0c793",
dscp_mark=TEST_DSCP_MARK_1)
test_dscp_marking_rule_2 = rule.QosDscpMarkingRule(
context=None,
qos_policy_id=TEST_POLICY_ID2,
id="7f126d84-551a-4dcf-bb01-0e9c0df0c793",
dscp_mark=TEST_DSCP_MARK_2)
test_bw_limit_rule_1 = rule.QosBandwidthLimitRule(
context=None,
qos_policy_id=TEST_POLICY_ID1,
id="5f126d84-551a-4dcf-bb01-0e9c0df0c793",
max_kbps=1000,
max_burst_kbps=10)
test_bw_limit_rule_2 = rule.QosBandwidthLimitRule(
context=None,
qos_policy_id=TEST_POLICY_ID2,
id="fa9128d9-44af-49b2-99bb-96548378ad42",
max_kbps=900,
max_burst_kbps=9)
def setUp(self):
super(OVSAgentQoSExtensionTestFramework, self).setUp()
self.config.set_override('extensions', ['qos'], 'agent')
self._set_pull_mock()
self.set_test_qos_rules(TEST_POLICY_ID1,
[self.test_bw_limit_rule_1,
self.test_dscp_marking_rule_1])
self.set_test_qos_rules(TEST_POLICY_ID2,
[self.test_bw_limit_rule_2,
self.test_dscp_marking_rule_2])
def _set_pull_mock(self):
self.qos_policies = {}
def _pull_mock(context, resource_type, resource_id):
return self.qos_policies[resource_id]
self.pull = mock.patch(
'neutron.api.rpc.handlers.resources_rpc.'
'ResourcesPullRpcApi.pull').start()
self.pull.side_effect = _pull_mock
def set_test_qos_rules(self, policy_id, policy_rules):
"""This function sets the policy test rules to be exposed."""
qos_policy = policy.QosPolicy(
context=None,
project_id=uuidutils.generate_uuid(),
id=policy_id,
name="Test Policy Name",
description="This is a policy for testing purposes",
shared=False,
rules=policy_rules)
qos_policy.obj_reset_changes()
self.qos_policies[policy_id] = qos_policy
def _create_test_port_dict(self, policy_id=None):
port_dict = super(OVSAgentQoSExtensionTestFramework,
self)._create_test_port_dict()
port_dict['qos_policy_id'] = policy_id
port_dict['network_qos_policy_id'] = None
return port_dict
def _get_device_details(self, port, network):
dev = super(OVSAgentQoSExtensionTestFramework,
self)._get_device_details(port, network)
dev['qos_policy_id'] = port['qos_policy_id']
return dev
def _assert_bandwidth_limit_rule_is_set(self, port, rule):
if rule.direction == constants.INGRESS_DIRECTION:
max_rate, burst = (
self.agent.int_br.get_ingress_bw_limit_for_port(
port['vif_name']))
else:
max_rate, burst = (
self.agent.int_br.get_egress_bw_limit_for_port(
port['vif_name']))
self.assertEqual(max_rate, rule.max_kbps)
self.assertEqual(burst, rule.max_burst_kbps)
def _assert_bandwidth_limit_rule_not_set(self, port, rule_direction):
if rule_direction == constants.INGRESS_DIRECTION:
max_rate, burst = (
self.agent.int_br.get_ingress_bw_limit_for_port(
port['vif_name']))
else:
max_rate, burst = (
self.agent.int_br.get_egress_bw_limit_for_port(
port['vif_name']))
self.assertIsNone(max_rate)
self.assertIsNone(burst)
def wait_until_bandwidth_limit_rule_applied(self, port, rule):
if rule and rule.direction == constants.INGRESS_DIRECTION:
l2_extensions.wait_until_ingress_bandwidth_limit_rule_applied(
self.agent.int_br, port['vif_name'], rule)
else:
l2_extensions.wait_until_egress_bandwidth_limit_rule_applied(
self.agent.int_br, port['vif_name'], rule)
def _assert_dscp_marking_rule_is_set(self, port, dscp_rule):
port_num = self.agent.int_br._get_port_val(port['vif_name'], 'ofport')
flows = self.agent.int_br.dump_flows_for(table='0',
in_port=str(port_num))
tos_mark = l2_extensions.extract_mod_nw_tos_action(flows)
self.assertEqual(dscp_rule.dscp_mark << 2, tos_mark)
def _assert_dscp_marking_rule_not_set(self, port):
port_num = self.agent.int_br._get_port_val(port['vif_name'], 'ofport')
flows = self.agent.int_br.dump_flows_for(table='0',
in_port=str(port_num))
tos_mark = l2_extensions.extract_mod_nw_tos_action(flows)
self.assertIsNone(tos_mark)
def wait_until_dscp_marking_rule_applied(self, port, dscp_mark):
l2_extensions.wait_until_dscp_marking_rule_applied_ovs(
self.agent.int_br, port['vif_name'], dscp_mark)
def _create_port_with_qos(self):
port_dict = self._create_test_port_dict()
port_dict['qos_policy_id'] = TEST_POLICY_ID1
self.setup_agent_and_ports([port_dict])
self.wait_until_ports_state(self.ports, up=True)
self.wait_until_bandwidth_limit_rule_applied(port_dict,
self.test_bw_limit_rule_1)
return port_dict
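# Note on the scenario handling used by the test class below:
# `load_tests_apply_scenarios` makes testscenarios clone every test in
# TestOVSAgentQosExtension once per entry of its `scenarios` list, setting the
# scenario attributes (here just `direction`) on the test instance before
# setUp() runs. A rough, hand-written equivalent of that expansion
# (illustrative only) would look like:
#
#     for name, attrs in [('ingress', {'direction': constants.INGRESS_DIRECTION}),
#                         ('egress', {'direction': constants.EGRESS_DIRECTION})]:
#         test = TestOVSAgentQosExtension('test_port_creation_with_bandwidth_limit')
#         for key, value in attrs.items():
#             setattr(test, key, value)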
class TestOVSAgentQosExtension(OVSAgentQoSExtensionTestFramework):
scenarios = [
('ingress', {'direction': constants.INGRESS_DIRECTION}),
('egress', {'direction': constants.EGRESS_DIRECTION})
]
def setUp(self):
super(TestOVSAgentQosExtension, self).setUp()
self.test_bw_limit_rule_1.direction = self.direction
self.test_bw_limit_rule_2.direction = self.direction
@property
def reverse_direction(self):
if self.direction == constants.INGRESS_DIRECTION:
return constants.EGRESS_DIRECTION
elif self.direction == constants.EGRESS_DIRECTION:
return constants.INGRESS_DIRECTION
def test_port_creation_with_bandwidth_limit(self):
"""Make sure bandwidth limit rules are set in low level to ports."""
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_bandwidth_limit_rule_is_set(
port, self.test_bw_limit_rule_1)
def test_port_creation_with_bandwidth_limits_both_directions(self):
"""Make sure bandwidth limit rules are set in low level to ports.
This test is checking applying rules for both possible
directions at once
"""
reverse_direction_bw_limit_rule = copy.deepcopy(
self.test_bw_limit_rule_1)
reverse_direction_bw_limit_rule.direction = self.reverse_direction
self.qos_policies[TEST_POLICY_ID1].rules.append(
reverse_direction_bw_limit_rule)
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_bandwidth_limit_rule_is_set(
port, self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_is_set(
port, reverse_direction_bw_limit_rule)
def test_port_creation_with_different_bandwidth_limits(self):
"""Make sure different types of policies end on the right ports."""
port_dicts = self.create_test_ports(amount=3)
port_dicts[0]['qos_policy_id'] = TEST_POLICY_ID1
port_dicts[1]['qos_policy_id'] = TEST_POLICY_ID2
self.setup_agent_and_ports(port_dicts)
self.wait_until_ports_state(self.ports, up=True)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_is_set(self.ports[1],
self.test_bw_limit_rule_2)
self._assert_bandwidth_limit_rule_not_set(self.ports[2],
self.direction)
def test_port_creation_with_dscp_marking(self):
"""Make sure dscp marking rules are set in low level to ports."""
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_dscp_marking_rule_is_set(
port, self.test_dscp_marking_rule_1)
def test_port_creation_with_different_dscp_markings(self):
"""Make sure different types of policies end on the right ports."""
port_dicts = self.create_test_ports(amount=3)
port_dicts[0]['qos_policy_id'] = TEST_POLICY_ID1
port_dicts[1]['qos_policy_id'] = TEST_POLICY_ID2
self.setup_agent_and_ports(port_dicts)
self.wait_until_ports_state(self.ports, up=True)
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_1)
self._assert_dscp_marking_rule_is_set(self.ports[1],
self.test_dscp_marking_rule_2)
self._assert_dscp_marking_rule_not_set(self.ports[2])
def test_simple_port_policy_update(self):
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_1)
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules[0].max_kbps = 500
policy_copy.rules[0].max_burst_kbps = 5
policy_copy.rules[1].dscp_mark = TEST_DSCP_MARK_2
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY,
[policy_copy], events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(self.ports[0],
policy_copy.rules[0])
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
policy_copy.rules[0])
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_2)
def test_simple_port_policy_update_change_bw_limit_direction(self):
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_not_set(self.ports[0],
self.reverse_direction)
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules[0].direction = self.reverse_direction
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY,
[policy_copy], events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(self.ports[0],
policy_copy.rules[0])
self._assert_bandwidth_limit_rule_not_set(self.ports[0],
self.direction)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
policy_copy.rules[0])
def test_port_qos_disassociation(self):
"""Test that qos_policy_id set to None will remove all qos rules from
given port.
"""
port_dict = self._create_port_with_qos()
port_dict['qos_policy_id'] = None
self.agent.port_update(None, port=port_dict)
self.wait_until_bandwidth_limit_rule_applied(port_dict, None)
def test_port_qos_update_policy_id(self):
"""Test that change of qos policy id on given port refreshes all its
rules.
"""
port_dict = self._create_port_with_qos()
port_dict['qos_policy_id'] = TEST_POLICY_ID2
self.agent.port_update(None, port=port_dict)
self.wait_until_bandwidth_limit_rule_applied(port_dict,
self.test_bw_limit_rule_2)
def test_policy_rule_delete(self):
port_dict = self._create_port_with_qos()
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules = list()
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY, [policy_copy],
events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(port_dict, None)
| # Copyright (c) 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from neutron_lib import constants
from oslo_utils import uuidutils
import testscenarios
from neutron.api.rpc.callbacks.consumer import registry as consumer_reg
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.callbacks import resources
from neutron.objects.qos import policy
from neutron.objects.qos import rule
from neutron.tests.common.agents import l2_extensions
from neutron.tests.functional.agent.l2 import base
load_tests = testscenarios.load_tests_apply_scenarios
TEST_POLICY_ID1 = "a2d72369-4246-4f19-bd3c-af51ec8d70cd"
TEST_POLICY_ID2 = "46ebaec0-0570-43ac-82f6-60d2b03168c5"
TEST_DSCP_MARK_1 = 14
TEST_DSCP_MARK_2 = 30
class OVSAgentQoSExtensionTestFramework(base.OVSAgentTestFramework):
test_dscp_marking_rule_1 = rule.QosDscpMarkingRule(
context=None,
qos_policy_id=TEST_POLICY_ID1,
id="9f126d84-551a-4dcf-bb01-0e9c0df0c793",
dscp_mark=TEST_DSCP_MARK_1)
test_dscp_marking_rule_2 = rule.QosDscpMarkingRule(
context=None,
qos_policy_id=TEST_POLICY_ID2,
id="7f126d84-551a-4dcf-bb01-0e9c0df0c793",
dscp_mark=TEST_DSCP_MARK_2)
test_bw_limit_rule_1 = rule.QosBandwidthLimitRule(
context=None,
qos_policy_id=TEST_POLICY_ID1,
id="5f126d84-551a-4dcf-bb01-0e9c0df0c793",
max_kbps=1000,
max_burst_kbps=10)
test_bw_limit_rule_2 = rule.QosBandwidthLimitRule(
context=None,
qos_policy_id=TEST_POLICY_ID2,
id="fa9128d9-44af-49b2-99bb-96548378ad42",
max_kbps=900,
max_burst_kbps=9)
def setUp(self):
super(OVSAgentQoSExtensionTestFramework, self).setUp()
self.config.set_override('extensions', ['qos'], 'agent')
self._set_pull_mock()
self.set_test_qos_rules(TEST_POLICY_ID1,
[self.test_bw_limit_rule_1,
self.test_dscp_marking_rule_1])
self.set_test_qos_rules(TEST_POLICY_ID2,
[self.test_bw_limit_rule_2,
self.test_dscp_marking_rule_2])
def _set_pull_mock(self):
self.qos_policies = {}
def _pull_mock(context, resource_type, resource_id):
return self.qos_policies[resource_id]
self.pull = mock.patch(
'neutron.api.rpc.handlers.resources_rpc.'
'ResourcesPullRpcApi.pull').start()
self.pull.side_effect = _pull_mock
def set_test_qos_rules(self, policy_id, policy_rules):
"""This function sets the policy test rules to be exposed."""
qos_policy = policy.QosPolicy(
context=None,
project_id=uuidutils.generate_uuid(),
id=policy_id,
name="Test Policy Name",
description="This is a policy for testing purposes",
shared=False,
rules=policy_rules)
qos_policy.obj_reset_changes()
self.qos_policies[policy_id] = qos_policy
def _create_test_port_dict(self, policy_id=None):
port_dict = super(OVSAgentQoSExtensionTestFramework,
self)._create_test_port_dict()
port_dict['qos_policy_id'] = policy_id
port_dict['network_qos_policy_id'] = None
return port_dict
def _get_device_details(self, port, network):
dev = super(OVSAgentQoSExtensionTestFramework,
self)._get_device_details(port, network)
dev['qos_policy_id'] = port['qos_policy_id']
return dev
def _assert_bandwidth_limit_rule_is_set(self, port, rule):
if rule.direction == constants.INGRESS_DIRECTION:
max_rate, burst = (
self.agent.int_br.get_ingress_bw_limit_for_port(
port['vif_name']))
else:
max_rate, burst = (
self.agent.int_br.get_egress_bw_limit_for_port(
port['vif_name']))
self.assertEqual(max_rate, rule.max_kbps)
self.assertEqual(burst, rule.max_burst_kbps)
def _assert_bandwidth_limit_rule_not_set(self, port, rule_direction):
if rule_direction == constants.INGRESS_DIRECTION:
max_rate, burst = (
self.agent.int_br.get_ingress_bw_limit_for_port(
port['vif_name']))
else:
max_rate, burst = (
self.agent.int_br.get_egress_bw_limit_for_port(
port['vif_name']))
self.assertIsNone(max_rate)
self.assertIsNone(burst)
def wait_until_bandwidth_limit_rule_applied(self, port, rule):
if rule and rule.direction == constants.INGRESS_DIRECTION:
l2_extensions.wait_until_ingress_bandwidth_limit_rule_applied(
self.agent.int_br, port['vif_name'], rule)
else:
l2_extensions.wait_until_egress_bandwidth_limit_rule_applied(
self.agent.int_br, port['vif_name'], rule)
def _assert_dscp_marking_rule_is_set(self, port, dscp_rule):
port_num = self.agent.int_br._get_port_val(port['vif_name'], 'ofport')
flows = self.agent.int_br.dump_flows_for(table='0',
in_port=str(port_num))
tos_mark = l2_extensions.extract_mod_nw_tos_action(flows)
self.assertEqual(dscp_rule.dscp_mark << 2, tos_mark)
def _assert_dscp_marking_rule_not_set(self, port):
port_num = self.agent.int_br._get_port_val(port['vif_name'], 'ofport')
flows = self.agent.int_br.dump_flows_for(table='0',
in_port=str(port_num))
tos_mark = l2_extensions.extract_mod_nw_tos_action(flows)
self.assertIsNone(tos_mark)
def wait_until_dscp_marking_rule_applied(self, port, dscp_mark):
l2_extensions.wait_until_dscp_marking_rule_applied_ovs(
self.agent.int_br, port['vif_name'], dscp_mark)
def _create_port_with_qos(self):
port_dict = self._create_test_port_dict()
port_dict['qos_policy_id'] = TEST_POLICY_ID1
self.setup_agent_and_ports([port_dict])
self.wait_until_ports_state(self.ports, up=True)
self.wait_until_bandwidth_limit_rule_applied(port_dict,
self.test_bw_limit_rule_1)
return port_dict
class TestOVSAgentQosExtension(OVSAgentQoSExtensionTestFramework):
scenarios = [
('ingress', {'direction': constants.INGRESS_DIRECTION}),
('egress', {'direction': constants.EGRESS_DIRECTION})
]
def setUp(self):
super(TestOVSAgentQosExtension, self).setUp()
self.test_bw_limit_rule_1.direction = self.direction
self.test_bw_limit_rule_2.direction = self.direction
@property
def reverse_direction(self):
if self.direction == constants.INGRESS_DIRECTION:
return constants.EGRESS_DIRECTION
elif self.direction == constants.EGRESS_DIRECTION:
return constants.INGRESS_DIRECTION
def test_port_creation_with_bandwidth_limit(self):
"""Make sure bandwidth limit rules are set in low level to ports."""
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_bandwidth_limit_rule_is_set(
port, self.test_bw_limit_rule_1)
def test_port_creation_with_bandwidth_limits_both_directions(self):
"""Make sure bandwidth limit rules are set in low level to ports.
This test is checking applying rules for both possible
directions at once
"""
reverse_direction_bw_limit_rule = copy.deepcopy(
self.test_bw_limit_rule_1)
reverse_direction_bw_limit_rule.direction = self.reverse_direction
self.qos_policies[TEST_POLICY_ID1].rules.append(
reverse_direction_bw_limit_rule)
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_bandwidth_limit_rule_is_set(
port, self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_is_set(
port, reverse_direction_bw_limit_rule)
def test_port_creation_with_different_bandwidth_limits(self):
"""Make sure different types of policies end on the right ports."""
port_dicts = self.create_test_ports(amount=3)
port_dicts[0]['qos_policy_id'] = TEST_POLICY_ID1
port_dicts[1]['qos_policy_id'] = TEST_POLICY_ID2
self.setup_agent_and_ports(port_dicts)
self.wait_until_ports_state(self.ports, up=True)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_is_set(self.ports[1],
self.test_bw_limit_rule_2)
self._assert_bandwidth_limit_rule_not_set(self.ports[2],
self.direction)
def test_port_creation_with_dscp_marking(self):
"""Make sure dscp marking rules are set in low level to ports."""
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
for port in self.ports:
self._assert_dscp_marking_rule_is_set(
port, self.test_dscp_marking_rule_1)
def test_port_creation_with_different_dscp_markings(self):
"""Make sure different types of policies end on the right ports."""
port_dicts = self.create_test_ports(amount=3)
port_dicts[0]['qos_policy_id'] = TEST_POLICY_ID1
port_dicts[1]['qos_policy_id'] = TEST_POLICY_ID2
self.setup_agent_and_ports(port_dicts)
self.wait_until_ports_state(self.ports, up=True)
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_1)
self._assert_dscp_marking_rule_is_set(self.ports[1],
self.test_dscp_marking_rule_2)
self._assert_dscp_marking_rule_not_set(self.ports[2])
def test_simple_port_policy_update(self):
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_1)
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules[0].max_kbps = 500
policy_copy.rules[0].max_burst_kbps = 5
policy_copy.rules[1].dscp_mark = TEST_DSCP_MARK_2
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY,
[policy_copy], events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(self.ports[0],
policy_copy.rules[0])
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
policy_copy.rules[0])
self._assert_dscp_marking_rule_is_set(self.ports[0],
self.test_dscp_marking_rule_2)
def test_simple_port_policy_update_change_bw_limit_direction(self):
self.setup_agent_and_ports(
port_dicts=self.create_test_ports(amount=1,
policy_id=TEST_POLICY_ID1))
self.wait_until_ports_state(self.ports, up=True)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
self.test_bw_limit_rule_1)
self._assert_bandwidth_limit_rule_not_set(self.ports[0],
self.reverse_direction)
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules[0].direction = self.reverse_direction
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY,
[policy_copy], events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(self.ports[0],
policy_copy.rules[0])
self._assert_bandwidth_limit_rule_not_set(self.ports[0],
self.direction)
self._assert_bandwidth_limit_rule_is_set(self.ports[0],
policy_copy.rules[0])
def test_port_qos_disassociation(self):
"""Test that qos_policy_id set to None will remove all qos rules from
given port.
"""
port_dict = self._create_port_with_qos()
port_dict['qos_policy_id'] = None
self.agent.port_update(None, port=port_dict)
self.wait_until_bandwidth_limit_rule_applied(port_dict, None)
def test_port_qos_update_policy_id(self):
"""Test that change of qos policy id on given port refreshes all its
rules.
"""
port_dict = self._create_port_with_qos()
port_dict['qos_policy_id'] = TEST_POLICY_ID2
self.agent.port_update(None, port=port_dict)
self.wait_until_bandwidth_limit_rule_applied(port_dict,
self.test_bw_limit_rule_2)
def test_policy_rule_delete(self):
port_dict = self._create_port_with_qos()
policy_copy = copy.deepcopy(self.qos_policies[TEST_POLICY_ID1])
policy_copy.rules = list()
context = mock.Mock()
consumer_reg.push(context, resources.QOS_POLICY, [policy_copy],
events.UPDATED)
self.wait_until_bandwidth_limit_rule_applied(port_dict, None)
| en | 0.896847 | # Copyright (c) 2015 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. This function sets the policy test rules to be exposed. Make sure bandwidth limit rules are set in low level to ports. Make sure bandwidth limit rules are set in low level to ports. This test is checking applying rules for both possible directions at once Make sure different types of policies end on the right ports. Make sure dscp marking rules are set in low level to ports. Make sure different types of policies end on the right ports. Test that qos_policy_id set to None will remove all qos rules from given port. Test that change of qos policy id on given port refreshes all its rules. | 1.343497 | 1 |
gym_ucv/__init__.py | mkisantal/gym-ucv-control | 0 | 6631797 | <reponame>mkisantal/gym-ucv-control
from gym.envs.registration import register
register(
id='ucv-v0',
entry_point='gym_ucv.envs:UcvEnv',
)
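# Usage sketch: once this package is imported, the id registered above can be
# handed to gym.make(). Wrapped in a function so nothing runs at import time;
# the reset()/step() lines follow the classic gym API and are only illustrative.
def _example_make_env():
    import gym
    env = gym.make('ucv-v0')
    obs = env.reset()
    obs, reward, done, info = env.step(env.action_space.sample())
    return env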
| from gym.envs.registration import register
register(
id='ucv-v0',
entry_point='gym_ucv.envs:UcvEnv',
) | none | 1 | 1.286423 | 1 |
|
graph_construction/get_diagnosis_strings.py | EmmaRocheteau/eICU-GNN-LSTM | 53 | 6631798 | <reponame>EmmaRocheteau/eICU-GNN-LSTM
import pandas as pd
import json
with open('paths.json', 'r') as f:
eICU_path = json.load(f)["eICU_path"]
train_diagnoses = pd.read_csv('{}train/diagnoses.csv'.format(eICU_path), index_col='patient')
val_diagnoses = pd.read_csv('{}val/diagnoses.csv'.format(eICU_path), index_col='patient')
test_diagnoses = pd.read_csv('{}test/diagnoses.csv'.format(eICU_path), index_col='patient')
diag_strings = train_diagnoses.columns
# some quick cleaning i.e. remove classes and subclasses, get rid of strange words and characters
cleaned_strings = []
for i, diag in enumerate(diag_strings):
diag = diag.replace('groupedapacheadmissiondx', '').replace('apacheadmissiondx', '').replace(' (R)', '') # get rid of non-useful text
cleaned_strings.append(diag)
# hacky way to quickly get indexable list
train_diagnoses.columns = cleaned_strings
cleaned_strings = train_diagnoses.columns
def get_diagnosis_strings(diagnoses_df, partition=''):
with open(eICU_path + partition + '/diagnosis_strings_cleaned.txt', 'w') as f:
for i, row in diagnoses_df.iterrows():
diagnosis_strings = cleaned_strings[row.to_numpy().nonzero()[0]]
patient_strings = []
for diag in diagnosis_strings:
if not any(diag in string for string in diagnosis_strings.drop(diag)): # check in the rest of the strings for overlap
patient_strings.append(diag.replace('_', ' ').replace('|', ' '))
str_to_write = ", ".join(patient_strings)
if str_to_write == "":
str_to_write = "No Diagnoses"
f.write(str_to_write)
f.write("\n")
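# Tiny illustration of the substring filter above, using made-up strings: only
# the most specific diagnosis strings survive, because any string that is a
# substring of a longer one gets dropped. This is a plain-list version of the
# same check done with the pandas Index above.
def _filter_most_specific(strings):
    return [s for s in strings
            if not any(s in other for other in strings if other != s)]
# _filter_most_specific(['infection', 'infection|pneumonia', 'sepsis'])
# -> ['infection|pneumonia', 'sepsis']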
get_diagnosis_strings(train_diagnoses, partition='train')
get_diagnosis_strings(val_diagnoses, partition='val')
get_diagnosis_strings(test_diagnoses, partition='test') | import pandas as pd
import json
with open('paths.json', 'r') as f:
eICU_path = json.load(f)["eICU_path"]
train_diagnoses = pd.read_csv('{}train/diagnoses.csv'.format(eICU_path), index_col='patient')
val_diagnoses = pd.read_csv('{}val/diagnoses.csv'.format(eICU_path), index_col='patient')
test_diagnoses = pd.read_csv('{}test/diagnoses.csv'.format(eICU_path), index_col='patient')
diag_strings = train_diagnoses.columns
# some quick cleaning i.e. remove classes and subclasses, get rid of strange words and characters
cleaned_strings = []
for i, diag in enumerate(diag_strings):
diag = diag.replace('groupedapacheadmissiondx', '').replace('apacheadmissiondx', '').replace(' (R)', '') # get rid of non-useful text
cleaned_strings.append(diag)
# hacky way to quickly get indexable list
train_diagnoses.columns = cleaned_strings
cleaned_strings = train_diagnoses.columns
def get_diagnosis_strings(diagnoses_df, partition=''):
with open(eICU_path + partition + '/diagnosis_strings_cleaned.txt', 'w') as f:
for i, row in diagnoses_df.iterrows():
diagnosis_strings = cleaned_strings[row.to_numpy().nonzero()[0]]
patient_strings = []
for diag in diagnosis_strings:
if not any(diag in string for string in diagnosis_strings.drop(diag)): # check in the rest of the strings for overlap
patient_strings.append(diag.replace('_', ' ').replace('|', ' '))
str_to_write = ", ".join(patient_strings)
if str_to_write == "":
str_to_write = "No Diagnoses"
f.write(str_to_write)
f.write("\n")
get_diagnosis_strings(train_diagnoses, partition='train')
get_diagnosis_strings(val_diagnoses, partition='val')
get_diagnosis_strings(test_diagnoses, partition='test') | en | 0.890801 | # some quick cleaning i.e. remove classes and subclasses, get rid of strange words and characters # get rid of non-useful text # hacky way to quickly get indexable list # check in the rest of the strings for overlap | 2.75201 | 3 |
bloomcast/main.py | SalishSeaCast/SOG-Bloomcast-Ensemble | 1 | 6631799 | # Copyright 2011-2021 <NAME> and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SoG-bloomcast application.
Operational prediction of the Strait of Georgia spring phytoplankton bloom
This module is connected to the `bloomcast` command via a console_scripts
entry point in setup.py.
"""
import sys
import cliff.app
import cliff.commandmanager
from . import __pkg_metadata__
__all__ = [
'BloomcastApp', 'main',
]
class BloomcastApp(cliff.app.App):
CONSOLE_MESSAGE_FORMAT = '%(levelname)s:%(name)s:%(message)s'
def __init__(self):
app_namespace = 'bloomcast.app'
super(BloomcastApp, self).__init__(
description=__pkg_metadata__.DESCRIPTION,
version=__pkg_metadata__.VERSION,
command_manager=cliff.commandmanager.CommandManager(app_namespace),
)
def main(argv=sys.argv[1:]):
app = BloomcastApp()
return app.run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
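# Sketch of the setup.py wiring the module docstring refers to (the command
# name 'ensemble' and its module path below are guesses -- check the project's
# real setup.py): the console_scripts entry exposes the `bloomcast` executable,
# and cliff then discovers subcommands through the 'bloomcast.app' namespace
# handed to the CommandManager above.
#
#     entry_points={
#         'console_scripts': ['bloomcast = bloomcast.main:main'],
#         'bloomcast.app': ['ensemble = bloomcast.ensemble:Ensemble'],
#     }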
| # Copyright 2011-2021 <NAME> and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SoG-bloomcast application.
Operational prediction of the Strait of Georgia spring phytoplankton bloom
This module is connected to the `bloomcast` command via a console_scripts
entry point in setup.py.
"""
import sys
import cliff.app
import cliff.commandmanager
from . import __pkg_metadata__
__all__ = [
'BloomcastApp', 'main',
]
class BloomcastApp(cliff.app.App):
CONSOLE_MESSAGE_FORMAT = '%(levelname)s:%(name)s:%(message)s'
def __init__(self):
app_namespace = 'bloomcast.app'
super(BloomcastApp, self).__init__(
description=__pkg_metadata__.DESCRIPTION,
version=__pkg_metadata__.VERSION,
command_manager=cliff.commandmanager.CommandManager(app_namespace),
)
def main(argv=sys.argv[1:]):
app = BloomcastApp()
return app.run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| en | 0.833746 | # Copyright 2011-2021 <NAME> and The University of British Columbia # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. SoG-bloomcast application. Operational prediction of the Strait of Georgia spring phytoplankton bloom This module is connected to the `bloomcast` command via a console_scripts entry point in setup.py. | 1.713961 | 2 |
pive/inputmanager.py | internet-sicherheit/pive | 8 | 6631800 | <gh_stars>1-10
# Copyright (c) 2014 - 2015, <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -*- coding: utf-8 -*-
""" The input manager reads files and strings containing
datasets in json and csv format. The data is automatically
validated and corrected if necessary."""
from . import inputreader as reader
from . import datavalidater as validater
from . import consistenceprofiler as profiler
from . import visualizationmapper as vizmapper
NOT_CONSISTENT_ERR_MSG = "Data is not consistent."
NO_DATA_LOADED_ERR_MSG = "Unexpected data source."
class InputManager(object):
"""Contains and manages the data."""
# Input Managers can try to merge false datapoints or not.
def __init__(self, mergedata=False):
self.__mergedata = mergedata
self.__contains_datefields = False
def read(self, source):
"""Reads the input source."""
inputdata = reader.load_input_source(source)
# Raise an error if the data source is empty or nor readable.
if not inputdata:
raise ValueError(NO_DATA_LOADED_ERR_MSG)
dataset = self.__validate_input(inputdata)
# Raise an error if the dataset is not consistent.
if not self.__is_dataset_consistent(dataset):
raise ValueError(NOT_CONSISTENT_ERR_MSG)
return dataset
def map(self, dataset):
"""Maps the dataset to supported visualizations."""
viztypes = self.__get_datapoint_types(dataset)
properties = vizmapper.get_visualization_properties(dataset, viztypes)
suitables = vizmapper.check_possibilities(properties)
self.__contains_datefields = vizmapper.has_date(viztypes)
return suitables
def has_date_points(self):
"""Returns true if the data contains dates."""
return self.__contains_datefields
def __is_dataset_consistent(self, dataset):
"""Checks if the dataset is consistent."""
consistent = profiler.is_dataset_consistent(dataset)
return consistent
def __get_datapoint_types(self, dataset):
"""Returns all containing visualization types."""
viztypes = profiler.get_datapoint_types(dataset[0])
return viztypes
def __validate_input(self, inputdata):
"""Validates the input data:"""
validdata = []
if self.__mergedata:
validdata = self.__merged_dataset_validation(inputdata)
else:
validdata = self.__dataset_validation(inputdata)
return validdata
def __merged_dataset_validation(self, inputdata):
"""Validate the data by merging all shared keys."""
allkeys = validater.get_all_keys_in_dataset(inputdata)
sharedkeys = validater.determine_shared_keys_in_dataset(allkeys,
inputdata)
dataset = validater.generate_valid_dataset_from_shared_keys(sharedkeys,
inputdata)
return dataset
def __dataset_validation(self, inputdata):
"""Validate the unmerged data by counting the keys."""
keycount = validater.count_keys_in_raw_data(inputdata)
validkeys = validater.validate_data_keys(keycount)
dataset = validater.generate_valid_dataset(validkeys, inputdata)
return dataset
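# Minimal usage sketch (the file name and flag below are illustrative): read a
# JSON or CSV source, then ask which visualizations the validated dataset
# supports. read() raises ValueError for empty or inconsistent input.
def _example_usage():
    manager = InputManager(mergedata=False)
    dataset = manager.read('measurements.json')
    suitable = manager.map(dataset)
    return dataset, suitable, manager.has_date_points()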
| # Copyright (c) 2014 - 2015, <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -*- coding: utf-8 -*-
""" The input manager reads files and strings containing
datasets in json and csv format. The data is automatically
validated and corrected if necessary."""
from . import inputreader as reader
from . import datavalidater as validater
from . import consistenceprofiler as profiler
from . import visualizationmapper as vizmapper
NOT_CONSISTENT_ERR_MSG = "Data is not consistent."
NO_DATA_LOADED_ERR_MSG = "Unexpected data source."
class InputManager(object):
"""Contains and manages the data."""
# Input Managers can try to merge false datapoints or not.
def __init__(self, mergedata=False):
self.__mergedata = mergedata
self.__contains_datefields = False
def read(self, source):
"""Reads the input source."""
inputdata = reader.load_input_source(source)
# Raise an error if the data source is empty or nor readable.
if not inputdata:
raise ValueError(NO_DATA_LOADED_ERR_MSG)
dataset = self.__validate_input(inputdata)
# Raise an error if the dataset is not consistent.
if not self.__is_dataset_consistent(dataset):
raise ValueError(NOT_CONSISTENT_ERR_MSG)
return dataset
def map(self, dataset):
"""Maps the dataset to supported visualizations."""
viztypes = self.__get_datapoint_types(dataset)
properties = vizmapper.get_visualization_properties(dataset, viztypes)
suitables = vizmapper.check_possibilities(properties)
self.__contains_datefields = vizmapper.has_date(viztypes)
return suitables
def has_date_points(self):
"""Returns true if the data contains dates."""
return self.__contains_datefields
def __is_dataset_consistent(self, dataset):
"""Checks if the dataset is consistent."""
consistent = profiler.is_dataset_consistent(dataset)
return consistent
def __get_datapoint_types(self, dataset):
"""Returns all containing visualization types."""
viztypes = profiler.get_datapoint_types(dataset[0])
return viztypes
def __validate_input(self, inputdata):
"""Validates the input data:"""
validdata = []
if self.__mergedata:
validdata = self.__merged_dataset_validation(inputdata)
else:
validdata = self.__dataset_validation(inputdata)
return validdata
def __merged_dataset_validation(self, inputdata):
"""Validate the data by merging all shared keys."""
allkeys = validater.get_all_keys_in_dataset(inputdata)
sharedkeys = validater.determine_shared_keys_in_dataset(allkeys,
inputdata)
dataset = validater.generate_valid_dataset_from_shared_keys(sharedkeys,
inputdata)
return dataset
def __dataset_validation(self, inputdata):
"""Validate the unmerged data by counting the keys."""
keycount = validater.count_keys_in_raw_data(inputdata)
validkeys = validater.validate_data_keys(keycount)
dataset = validater.generate_valid_dataset(validkeys, inputdata)
return dataset | en | 0.69546 | # Copyright (c) 2014 - 2015, <NAME> # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # -*- coding: utf-8 -*- The input manager reads files and strings containing datasets in json and csv format. The data is automatically validated and corrected if necessary. Contains and manages the data. # Input Managers can try to merge false datapoints or not. Reads the input source. # Raise an error if the data source is empty or nor readable. # Raise an error if the dataset is not consistent. Maps the dataset to supported visualizations. Returns true if the data contains dates. Checks if the dataset is consistent. Returns all containing visualization types. Validates the input data: Validate the data by merging all shared keys. Validate the unmerged data by counting the keys. | 1.300688 | 1 |
solutions/1037_valid_boomerang.py | YiqunPeng/leetcode_pro | 0 | 6631801 |
from typing import List
class Solution:
def isBoomerang(self, points: List[List[int]]) -> bool:
ab = (points[1][0] - points[0][0], points[1][1] - points[0][1])
bc = (points[2][0] - points[1][0], points[2][1] - points[1][1])
return ab != (0, 0) and bc != (0, 0) and ab[0] * bc[1] != ab[1] * bc[0]
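# A valid boomerang needs non-degenerate segments and non-collinear points: the
# expression above is the 2-D cross product ab x bc = ab[0]*bc[1] - ab[1]*bc[0],
# which is zero exactly when the three points lie on one line.
# Illustrative calls (hypothetical inputs, not part of the original solution):
# Solution().isBoomerang([[1, 1], [2, 3], [3, 2]]) # True - points form a triangle
# Solution().isBoomerang([[1, 1], [2, 2], [3, 3]]) # False - collinear points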
| class Solution:
def isBoomerang(self, points: List[List[int]]) -> bool:
ab = (points[1][0] - points[0][0], points[1][1] - points[0][1])
bc = (points[2][0] - points[1][0], points[2][1] - points[1][1])
return ab != (0, 0) and bc != (0, 0) and ab[0] * bc[1] != ab[1] * bc[0] | none | 1 | 3.083099 | 3 |
|
lib/pios/loader.py | creative-workflow/pi-setup | 1 | 6631802 | import sys, os, osx, linux
def is_linux():
return (sys.platform == 'linux' or sys.platform == 'linux2')
def is_osx():
return sys.platform == 'darwin'
def is_windows():
return sys.platform == 'win32'
def get_wrapper(wrapper = None):
if not wrapper:
if is_linux():
return linux.wrapper
if is_osx():
return osx.wrapper
raise Exception('os %s not supported' % os.name)
| import sys, os, osx, linux
def is_linux():
return (sys.platform == 'linux' or sys.platform == 'linux2')
def is_osx():
return sys.platform == 'darwin'
def is_windows():
return sys.platform == 'win32'
def get_wrapper(wrapper = None):
if not wrapper:
if is_linux():
return linux.wrapper
if is_osx():
return osx.wrapper
raise Exception('os %s not supported' % os.name)
| none | 1 | 3.122849 | 3 |
|
scripts/speed_ch.py | vanttec/rb_missions | 0 | 6631803 | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
from usv_perception.msg import obj_detected
from usv_perception.msg import obj_detected_list
from std_msgs.msg import Float32MultiArray
from std_msgs.msg import Int32
from geometry_msgs.msg import Pose2D
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
import numpy as np
import math
import time
import matplotlib.pyplot as plt
#EARTH_RADIUS = 6371000
class SpeedCh:
def __init__(self):
self.obj_list = []
self.activated = True
self.state = -1
self.NEDx = 0
self.NEDy = 0
self.yaw = 0
self.lat = 0
self.lon = 0
self.InitTime = rospy.Time.now().secs
self.distance = 0
self.offset = .55 #camera to ins offset
self.target_x = 0
self.target_y = 0
self.gate_x = 0
self.gate_y = 0
self.ned_alpha = 0
rospy.Subscriber("/vectornav/ins_2d/NED_pose", Pose2D, self.ins_pose_callback)
rospy.Subscriber('/usv_perception/yolo_zed/objects_detected', obj_detected_list, self.objs_callback)
self.path_pub = rospy.Publisher('/mission/waypoints', Float32MultiArray, queue_size=10)
self.status_pub = rospy.Publisher("/status", Int32, queue_size=10)
self.test = rospy.Publisher("/test", Int32, queue_size=10)
def ins_pose_callback(self,pose):
self.NEDx = pose.x
self.NEDy = pose.y
self.yaw = pose.theta
def objs_callback(self,data):
self.obj_list = []
for i in range(data.len):
if str(data.objects[i].clase) == 'bouy':
self.obj_list.append({'X' : data.objects[i].X + self.offset, 'Y' : data.objects[i].Y, 'color' : data.objects[i].color, 'class' : data.objects[i].clase})
'''
def gps_point_trans(self,y,x):
p = np.array([x,y])
J = np.array([[math.cos(self.yaw), -1*math.sin(self.yaw)],[math.sin(self.yaw), math.cos(self.yaw)]])
n = J.dot(p)
phi1 = math.radians(self.lat)
latitude2 = self.lat + (n[1] / EARTH_RADIUS) * (180 / math.pi)
longitude2 = self.lon + (n[0] / EARTH_RADIUS) * (180 / math.pi) / math.cos(phi1)
return latitude2,longitude2
'''
def punto_medio(self):
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(self.obj_list)):
x_list.append(self.obj_list[i]['X'])
y_list.append(self.obj_list[i]['Y'])
class_list.append(self.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_g1 = np.argsort(distance_list)[0]
ind_g2 = np.argsort(distance_list)[1]
x1 = x_list[ind_g1]
y1 = -1*y_list[ind_g1]
x2 = x_list[ind_g2]
y2 = -1*y_list[ind_g2]
xc = min([x1,x2]) + abs(x1 - x2)/2
yc = min([y1,y2]) + abs(y1 - y2)/2
if y1 < y2:
yl = y1
xl = x1
yr = y2
xr = x2
else:
yl = y2
xl = x2
yr = y1
xr = x1
yd = yl - yr
xd = xl - xr
alpha = math.atan2(yd,xd) + math.pi/2
if (abs(alpha) > (math.pi)):
alpha = (alpha/abs(alpha))*(abs(alpha)-2*math.pi)
self.ned_alpha = alpha + self.yaw
if (abs(self.ned_alpha) > (math.pi)):
self.ned_alpha = (self.ned_alpha/abs(self.ned_alpha))*(abs(self.ned_alpha)-2*math.pi)
xm, ym = self.gate_to_body(2,0,alpha,xc,yc)
self.target_x, self.target_y = self.body_to_ned(xm, ym)
self.gate_x, self.gate_y = self.body_to_ned(xc, yc)
obj = Float32MultiArray()
obj.layout.data_offset = 5
obj.data = [xc, yc, xm, ym, 2]
self.desired(obj)
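# Summary of punto_medio(): take the two nearest detected buoys, compute the gate
# midpoint (xc, yc) and the gate-normal heading alpha (kept in NED as ned_alpha),
# place a target point 2 units (presumably metres) past the midpoint along that
# heading, and publish the midpoint and target as the next waypoints.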
def waypoints_vuelta(self,v_x,v_y):
print('Empezo waypoints')
radio = 3
w1 = [v_x,v_y+radio]
w2 = [v_x+radio,v_y]
w3 = [v_x,v_y-radio]
obj = Float32MultiArray()
obj.layout.data_offset = 11
w1_x, w1_y = self.body_to_ned(w1[0],w1[1])
w2_x, w2_y = self.body_to_ned(w2[0],w2[1])
w3_x, w3_y = self.body_to_ned(w3[0],w3[1])
w5_x, w5_y = self.gate_to_ned(-3, 0, self.ned_alpha, self.gate_x, self.gate_y)
#obj.data = [(self.gps_point_trans(w1[0],w1[1]))[0],(self.gps_point_trans(w1[0],w1[1]))[1],(self.gps_point_trans(w2[0],w2[1]))[0],(self.gps_point_trans(w2[0],w2[1]))[1],(self.gps_point_trans(w3[0],w3[1]))[0],(self.gps_point_trans(w3[0],w3[1]))[1],self.start_gps[0],self.start_gps[1],1]
obj.data = [w1_x, w1_y, w2_x, w2_y, w3_x, w3_y, self.gate_x, self.gate_y, w5_x, w5_y, 0]
self.desired(obj)
def farther(self):
self.target_x, self.target_y = self.gate_to_ned(1.5, 0, self.ned_alpha, self.target_x, self.target_y)
obj = Float32MultiArray()
obj.layout.data_offset = 3
obj.data = [self.target_x, self.target_y, 0]
self.desired(obj)
def gate_to_body(self, gate_x2, gate_y2, alpha, body_x1, body_y1):
p = np.array([[gate_x2],[gate_y2]])
J = np.array([[math.cos(alpha), -1*math.sin(alpha)],[math.sin(alpha), math.cos(alpha)]])
n = J.dot(p)
body_x2 = n[0] + body_x1
body_y2 = n[1] + body_y1
return (body_x2, body_y2)
def body_to_ned(self, x, y):
p = np.array([x,y])
J = np.array([[math.cos(self.yaw), -1*math.sin(self.yaw)],[math.sin(self.yaw), math.cos(self.yaw)]])
n = J.dot(p)
nedx = n[0] + self.NEDx
nedy = n[1] + self.NEDy
return (nedx, nedy)
def gate_to_ned(self, gate_x2, gate_y2, alpha, ned_x1, ned_y1):
p = np.array([[gate_x2],[gate_y2]])
J = np.array([[math.cos(alpha), -1*math.sin(alpha)],[math.sin(alpha), math.cos(alpha)]])
n = J.dot(p)
ned_x2 = n[0] + ned_x1
ned_y2 = n[1] + ned_y1
return (ned_x2, ned_y2)
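# The three helpers above share one pattern: rotate a 2-D offset by the frame
# angle (alpha or yaw) using the rotation matrix J, then translate by the frame
# origin. They convert points between the gate-aligned frame, the boat body
# frame and the NED (north-east-down) world frame.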
def desired(self, obj):
self.path_pub.publish(obj)
def main():
rospy.init_node('speed_ch', anonymous=True)
rate = rospy.Rate(100)
E = SpeedCh()
E.distance = 4
while not rospy.is_shutdown() and E.activated:
if E.state == -1:
while not rospy.is_shutdown() and len(E.obj_list) < 2:
E.test.publish(E.state)
rate.sleep()
E.state = 0
elif E.state == 0:
E.test.publish(E.state)
if len(E.obj_list) >= 2 and E.distance >= 3:
E.punto_medio()
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list) < 2 or E.distance < 3):
if rospy.Time.now().secs - initTime > 3:
E.state = 1
rate.sleep()
break
elif E.state == 1:
E.test.publish(E.state)
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(E.obj_list)):
x_list.append(E.obj_list[i]['X'])
y_list.append(E.obj_list[i]['Y'])
class_list.append(E.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_0 = np.argsort(distance_list)[0]
if len(E.obj_list) >= 1 and (str(E.obj_list[ind_0]['color']) == 'blue'):
E.state = 2
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list)) < 1:
if rospy.Time.now().secs - initTime > 1:
E.farther()
rate.sleep()
break
elif E.state == 2:
E.test.publish(E.state)
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(E.obj_list)):
x_list.append(E.obj_list[i]['X'])
y_list.append(E.obj_list[i]['Y'])
class_list.append(E.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_0 = np.argsort(distance_list)[0]
if (len(E.obj_list) >= 1) and (E.obj_list[ind_0]['X'] < 8):
v_x = E.obj_list[0]['X']
v_y = E.obj_list[0]['Y']
E.state = 3
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list)) < 1:
if rospy.Time.now().secs - initTime > 1:
E.farther()
rate.sleep()
break
elif E.state == 3:
E.test.publish(E.state)
E.waypoints_vuelta(v_x,v_y)
E.state = 4
elif E.state == 4:
E.test.publish(E.state)
time.sleep(1)
E.status_pub.publish(1)
rate.sleep()
rospy.spin()
if __name__ == "__main__":
try:
main()
except rospy.ROSInterruptException:
pass
| #!/usr/bin/env python
import rospy
from std_msgs.msg import String
from usv_perception.msg import obj_detected
from usv_perception.msg import obj_detected_list
from std_msgs.msg import Float32MultiArray
from std_msgs.msg import Int32
from geometry_msgs.msg import Pose2D
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
import numpy as np
import math
import time
import matplotlib.pyplot as plt
#EARTH_RADIUS = 6371000
class SpeedCh:
def __init__(self):
self.obj_list = []
self.activated = True
self.state = -1
self.NEDx = 0
self.NEDy = 0
self.yaw = 0
self.lat = 0
self.lon = 0
self.InitTime = rospy.Time.now().secs
self.distance = 0
self.offset = .55 #camera to ins offset
self.target_x = 0
self.target_y = 0
self.gate_x = 0
self.gate_y = 0
self.ned_alpha = 0
rospy.Subscriber("/vectornav/ins_2d/NED_pose", Pose2D, self.ins_pose_callback)
rospy.Subscriber('/usv_perception/yolo_zed/objects_detected', obj_detected_list, self.objs_callback)
self.path_pub = rospy.Publisher('/mission/waypoints', Float32MultiArray, queue_size=10)
self.status_pub = rospy.Publisher("/status", Int32, queue_size=10)
self.test = rospy.Publisher("/test", Int32, queue_size=10)
def ins_pose_callback(self,pose):
self.NEDx = pose.x
self.NEDy = pose.y
self.yaw = pose.theta
def objs_callback(self,data):
self.obj_list = []
for i in range(data.len):
if str(data.objects[i].clase) == 'bouy':
self.obj_list.append({'X' : data.objects[i].X + self.offset, 'Y' : data.objects[i].Y, 'color' : data.objects[i].color, 'class' : data.objects[i].clase})
'''
def gps_point_trans(self,y,x):
p = np.array([x,y])
J = np.array([[math.cos(self.yaw), -1*math.sin(self.yaw)],[math.sin(self.yaw), math.cos(self.yaw)]])
n = J.dot(p)
phi1 = math.radians(self.lat)
latitude2 = self.lat + (n[1] / EARTH_RADIUS) * (180 / math.pi)
longitude2 = self.lon + (n[0] / EARTH_RADIUS) * (180 / math.pi) / math.cos(phi1)
return latitude2,longitude2
'''
def punto_medio(self):
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(self.obj_list)):
x_list.append(self.obj_list[i]['X'])
y_list.append(self.obj_list[i]['Y'])
class_list.append(self.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_g1 = np.argsort(distance_list)[0]
ind_g2 = np.argsort(distance_list)[1]
x1 = x_list[ind_g1]
y1 = -1*y_list[ind_g1]
x2 = x_list[ind_g2]
y2 = -1*y_list[ind_g2]
xc = min([x1,x2]) + abs(x1 - x2)/2
yc = min([y1,y2]) + abs(y1 - y2)/2
if y1 < y2:
yl = y1
xl = x1
yr = y2
xr = x2
else:
yl = y2
xl = x2
yr = y1
xr = x1
yd = yl - yr
xd = xl - xr
alpha = math.atan2(yd,xd) + math.pi/2
if (abs(alpha) > (math.pi)):
alpha = (alpha/abs(alpha))*(abs(alpha)-2*math.pi)
self.ned_alpha = alpha + self.yaw
if (abs(self.ned_alpha) > (math.pi)):
self.ned_alpha = (self.ned_alpha/abs(self.ned_alpha))*(abs(self.ned_alpha)-2*math.pi)
xm, ym = self.gate_to_body(2,0,alpha,xc,yc)
self.target_x, self.target_y = self.body_to_ned(xm, ym)
self.gate_x, self.gate_y = self.body_to_ned(xc, yc)
obj = Float32MultiArray()
obj.layout.data_offset = 5
obj.data = [xc, yc, xm, ym, 2]
self.desired(obj)
def waypoints_vuelta(self,v_x,v_y):
print('Empezo waypoints')
radio = 3
w1 = [v_x,v_y+radio]
w2 = [v_x+radio,v_y]
w3 = [v_x,v_y-radio]
obj = Float32MultiArray()
obj.layout.data_offset = 11
w1_x, w1_y = self.body_to_ned(w1[0],w1[1])
w2_x, w2_y = self.body_to_ned(w2[0],w2[1])
w3_x, w3_y = self.body_to_ned(w3[0],w3[1])
w5_x, w5_y = self.gate_to_ned(-3, 0, self.ned_alpha, self.gate_x, self.gate_y)
#obj.data = [(self.gps_point_trans(w1[0],w1[1]))[0],(self.gps_point_trans(w1[0],w1[1]))[1],(self.gps_point_trans(w2[0],w2[1]))[0],(self.gps_point_trans(w2[0],w2[1]))[1],(self.gps_point_trans(w3[0],w3[1]))[0],(self.gps_point_trans(w3[0],w3[1]))[1],self.start_gps[0],self.start_gps[1],1]
obj.data = [w1_x, w1_y, w2_x, w2_y, w3_x, w3_y, self.gate_x, self.gate_y, w5_x, w5_y, 0]
self.desired(obj)
def farther(self):
self.target_x, self.target_y = self.gate_to_ned(1.5, 0, self.ned_alpha, self.target_x, self.target_y)
obj = Float32MultiArray()
obj.layout.data_offset = 3
obj.data = [self.target_x, self.target_y, 0]
self.desired(obj)
def gate_to_body(self, gate_x2, gate_y2, alpha, body_x1, body_y1):
p = np.array([[gate_x2],[gate_y2]])
J = np.array([[math.cos(alpha), -1*math.sin(alpha)],[math.sin(alpha), math.cos(alpha)]])
n = J.dot(p)
body_x2 = n[0] + body_x1
body_y2 = n[1] + body_y1
return (body_x2, body_y2)
def body_to_ned(self, x, y):
p = np.array([x,y])
J = np.array([[math.cos(self.yaw), -1*math.sin(self.yaw)],[math.sin(self.yaw), math.cos(self.yaw)]])
n = J.dot(p)
nedx = n[0] + self.NEDx
nedy = n[1] + self.NEDy
return (nedx, nedy)
def gate_to_ned(self, gate_x2, gate_y2, alpha, ned_x1, ned_y1):
p = np.array([[gate_x2],[gate_y2]])
J = np.array([[math.cos(alpha), -1*math.sin(alpha)],[math.sin(alpha), math.cos(alpha)]])
n = J.dot(p)
ned_x2 = n[0] + ned_x1
ned_y2 = n[1] + ned_y1
return (ned_x2, ned_y2)
def desired(self, obj):
self.path_pub.publish(obj)
def main():
rospy.init_node('speed_ch', anonymous=True)
rate = rospy.Rate(100)
E = SpeedCh()
E.distance = 4
while not rospy.is_shutdown() and E.activated:
if E.state == -1:
while not rospy.is_shutdown() and len(E.obj_list) < 2:
E.test.publish(E.state)
rate.sleep()
E.state = 0
elif E.state == 0:
E.test.publish(E.state)
if len(E.obj_list) >= 2 and E.distance >= 3:
E.punto_medio()
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list) < 2 or E.distance < 3):
if rospy.Time.now().secs - initTime > 3:
E.state = 1
rate.sleep()
break
elif E.state == 1:
E.test.publish(E.state)
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(E.obj_list)):
x_list.append(E.obj_list[i]['X'])
y_list.append(E.obj_list[i]['Y'])
class_list.append(E.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_0 = np.argsort(distance_list)[0]
if len(E.obj_list) >= 1 and (str(E.obj_list[ind_0]['color']) == 'blue'):
E.state = 2
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list)) < 1:
if rospy.Time.now().secs - initTime > 1:
E.farther()
rate.sleep()
break
elif E.state == 2:
E.test.publish(E.state)
x_list = []
y_list = []
class_list = []
distance_list = []
for i in range(len(E.obj_list)):
x_list.append(E.obj_list[i]['X'])
y_list.append(E.obj_list[i]['Y'])
class_list.append(E.obj_list[i]['class'])
distance_list.append(math.pow(x_list[i]**2 + y_list[i]**2, 0.5))
ind_0 = np.argsort(distance_list)[0]
if (len(E.obj_list) >= 1) and (E.obj_list[ind_0]['X'] < 8):
v_x = E.obj_list[0]['X']
v_y = E.obj_list[0]['Y']
E.state = 3
else:
initTime = rospy.Time.now().secs
while not rospy.is_shutdown() and (len(E.obj_list)) < 1:
if rospy.Time.now().secs - initTime > 1:
E.farther()
rate.sleep()
break
elif E.state == 3:
E.test.publish(E.state)
E.waypoints_vuelta(v_x,v_y)
E.state = 4
elif E.state == 4:
E.test.publish(E.state)
time.sleep(1)
E.status_pub.publish(1)
rate.sleep()
rospy.spin()
if __name__ == "__main__":
try:
main()
except rospy.ROSInterruptException:
pass
| en | 0.152414 | #!/usr/bin/env python #EARTH_RADIUS = 6371000 #camera to ins offset def gps_point_trans(self,y,x): p = np.array([x,y]) J = np.array([[math.cos(self.yaw), -1*math.sin(self.yaw)],[math.sin(self.yaw), math.cos(self.yaw)]]) n = J.dot(p) phi1 = math.radians(self.lat) latitude2 = self.lat + (n[1] / EARTH_RADIUS) * (180 / math.pi) longitude2 = self.lon + (n[0] / EARTH_RADIUS) * (180 / math.pi) / math.cos(phi1) return latitude2,longitude2 #obj.data = [(self.gps_point_trans(w1[0],w1[1]))[0],(self.gps_point_trans(w1[0],w1[1]))[1],(self.gps_point_trans(w2[0],w2[1]))[0],(self.gps_point_trans(w2[0],w2[1]))[1],(self.gps_point_trans(w3[0],w3[1]))[0],(self.gps_point_trans(w3[0],w3[1]))[1],self.start_gps[0],self.start_gps[1],1] | 2.274906 | 2 |
python/search/find_common_sorted.py | amitsaha/playground | 4 | 6631804 |
"""
Find the common elements among two sorted sets
Desired time complexity: O(m+n)
"""
# Uses a hash table (hence uses O(min(m,n)) extra storage
# space
# This doesn't need the arrays to be sorted
def find_common(hash_t, arr):
for item in arr:
if hash_t.has_key(item):
print item
def find_sorted_hash(arr1, arr2):
if len(arr1) < len(arr2):
hash_t = {k:1 for k in arr1}
find_common(hash_t, arr2)
else:
hash_t = {k:1 for k in arr2}
find_common(hash_t, arr1)
# No extra storage space
# The array must be sorted
# O(m+n)
def find_common_traverse(arr1, arr2):
i,j = 0,0
while i < len(arr1) and j < len(arr2):
if arr1[i] == arr2[j]:
print arr1[i]
i += 1
j += 1
elif arr1[i] > arr2[j]:
j += 1
else:
i += 1
arr1 = [1,10,20,25,30]
arr2 = [1,10,30]
#find_sorted_hash(arr1, arr2)
find_common_traverse(arr1, arr2)
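# With the sample arrays above both approaches print the shared values 1, 10 and 30;
# find_sorted_hash is left commented out and can be re-enabled to compare the two.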
| """
Find the common elements among two sorted sets
Desired time complexity: O(m+n)
"""
# Uses a hash table (hence uses O(min(m,n)) extra storage
# space
# This doesn't need the arrays to be sorted
def find_common(hash_t, arr):
for item in arr:
if hash_t.has_key(item):
print item
def find_sorted_hash(arr1, arr2):
if len(arr1) < len(arr2):
hash_t = {k:1 for k in arr1}
find_common(hash_t, arr2)
else:
hash_t = {k:1 for k in arr2}
find_common(hash_t, arr1)
# No extra storage space
# The array must be sorted
# O(m+n)
def find_common_traverse(arr1, arr2):
i,j = 0,0
while i < len(arr1) and j < len(arr2):
if arr1[i] == arr2[j]:
print arr1[i]
i += 1
j += 1
elif arr1[i] > arr2[j]:
j += 1
else:
i += 1
arr1 = [1,10,20,25,30]
arr2 = [1,10,30]
#find_sorted_hash(arr1, arr2)
find_common_traverse(arr1, arr2) | en | 0.713311 | Find the common elements among two sorted sets Desired time complexity: O(m+n) # Uses a hash table (hence uses O(min(m,n)) extra storage # space # This doesn't need the arrays to be sorted # No extra storage space # The array must be sorted # O(m+n) #find_sorted_hash(arr1, arr2) | 4.04682 | 4 |
tests/riscv/APIs/AccessReservedRegisterTest_force.py | Wlgen/force-riscv | 0 | 6631805 | #
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from riscv.EnvRISCV import EnvRISCV
from riscv.GenThreadRISCV import GenThreadRISCV
from base.Sequence import Sequence
from base.TestUtils import assert_not_in
# import instruction_tree module from the current directory
from DV.riscv.trees.instruction_tree import *
# check #1: reserve/unreserve successive GPR registers; should NOT be able
# to access a reserved register via getRandomGPR functions.
# check #2: reserve sets of registers with limited access rights; generate
# random instructions, ensuring that each generated instruction
# honors the reserved registers.
class MainSequence(Sequence):
def generate(self, **kargs):
self.reservedIndexedRegCheck()
self.instrCheck()
def reservedIndexedRegCheck(self):
for i in range(1, 30):
reserved_gpr = "x%d" % i
# lets try read 1st...
if not self.isRegisterReserved(reserved_gpr, "Read"):
print("[DEBUG] next 'Read' reserved register is %s (ID: %d)\n" % (reserved_gpr, i))
self.reserveRegister(reserved_gpr, "Read")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "Read")
# then write...
if not self.isRegisterReserved(reserved_gpr, "Write"):
print(
"[DEBUG] next 'Write' reserved register is %s (ID: %d)\n" % (reserved_gpr, i)
)
self.reserveRegister(reserved_gpr, "Write")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "Write")
# then read/write...
if not self.isRegisterReserved(reserved_gpr, "ReadWrite"):
print(
"[DEBUG] next 'Read/Write' reserved register is %s "
"(ID: %d)\n" % (reserved_gpr, i)
)
self.reserveRegister(reserved_gpr, "ReadWrite")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "ReadWrite")
def instrCheck(self):
read_only_regs = self.reservedRandomRegCheck("Write")
write_only_regs = self.reservedRandomRegCheck("Read")
cant_access_regs = self.reservedRandomRegCheck("ReadWrite")
print("[DEBUG INSTR-CHECK] read-only regs: ", read_only_regs)
print("[DEBUG INSTR-CHECK] write-only regs: ", write_only_regs)
print("[DEBUG INSTR-CHECK] no-access regs: ", cant_access_regs)
for i in range(20):
random_instr = self.pickWeighted(ALU_Int32_instructions)
instr = self.genInstruction(random_instr)
instr_info = self.queryInstructionRecord(instr)
dests = instr_info["Dests"]
for rname, rvalue in dests.items():
assert_not_in(
rname,
read_only_regs,
(
"OOPS! random '%s' instruction used reserved (read-only) register (%s) as dest operand???"
% (instr_info["Name"], rname)
),
)
assert_not_in(
rname,
cant_access_regs,
(
"OOPS! random '%s' instruction used reserved (read,write not allowed) register (%s) as dest operand???"
% (instr_info["Name"], rname)
),
)
srcs = instr_info["Srcs"]
for rname, rvalue in srcs.items():
assert_not_in(
rname,
write_only_regs,
(
"OOPS! random '%s' instruction used reserved (write-only) register (%s) as src operand???"
% (instr_info["Name"], rname)
),
)
assert_not_in(
rname,
cant_access_regs,
(
"OOPS! random '%s' instruction used reserved (read,write not allowed) register (%s) as src operand???"
% (instr_info["Name"], rname)
),
)
self.freeReservedRegs(read_only_regs, "Write")
self.freeReservedRegs(write_only_regs, "Read")
self.freeReservedRegs(cant_access_regs, "ReadWrite")
def accessReservedReg(self, reserved_reg_id):
rnd_set = []
for j in range(100):
rnd_set.append(self.getRandomGPR())
(r1, r2, r3, r4, r5) = self.getRandomRegisters(5, "GPR", "%d" % reserved_reg_id)
if r1 not in rnd_set:
rnd_set.append(r1)
if r2 not in rnd_set:
rnd_set.append(r2)
if r3 not in rnd_set:
rnd_set.append(r3)
if r4 not in rnd_set:
rnd_set.append(r4)
if r5 not in rnd_set:
rnd_set.append(r5)
print("\t[DEBUG] random gprs: ", rnd_set)
assert_not_in(
reserved_reg_id,
rnd_set,
("OOPS! RandomGPR returned reserved register (X%d) ???" % reserved_reg_id),
)
def reservedRandomRegCheck(self, access_to_deny):
(gpr1, gpr2, gpr3) = self.getRandomRegisters(3, "GPR", "31")
self.reserveRegister("x%d" % gpr1, access_to_deny)
self.accessReservedReg(gpr1)
self.reserveRegister("x%d" % gpr2, access_to_deny)
self.accessReservedReg(gpr2)
self.reserveRegister("x%d" % gpr3, access_to_deny)
self.accessReservedReg(gpr3)
return [gpr1, gpr2, gpr3]
def freeReservedRegs(self, rlist, denied_access):
for reg in rlist:
self.unreserveRegister("x%d" % reg, denied_access)
# Points to the MainSequence defined in this file
MainSequenceClass = MainSequence
# Using GenThreadRISCV by default, can be overriden with extended classes
GenThreadClass = GenThreadRISCV
# Using EnvRISCV by default, can be overriden with extended classes
EnvClass = EnvRISCV
| #
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from riscv.EnvRISCV import EnvRISCV
from riscv.GenThreadRISCV import GenThreadRISCV
from base.Sequence import Sequence
from base.TestUtils import assert_not_in
# import instruction_tree module from the current directory
from DV.riscv.trees.instruction_tree import *
# check #1: reserve/unreserve successive GPR registers; should NOT be able
# to access a reserved register via getRandomGPR functions.
# check #2: reserve sets of registers with limited access rights; generate
# random instructions, ensuring that each generated instruction
# honors the reserved registers.
class MainSequence(Sequence):
def generate(self, **kargs):
self.reservedIndexedRegCheck()
self.instrCheck()
def reservedIndexedRegCheck(self):
for i in range(1, 30):
reserved_gpr = "x%d" % i
# lets try read 1st...
if not self.isRegisterReserved(reserved_gpr, "Read"):
print("[DEBUG] next 'Read' reserved register is %s (ID: %d)\n" % (reserved_gpr, i))
self.reserveRegister(reserved_gpr, "Read")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "Read")
# then write...
if not self.isRegisterReserved(reserved_gpr, "Write"):
print(
"[DEBUG] next 'Write' reserved register is %s (ID: %d)\n" % (reserved_gpr, i)
)
self.reserveRegister(reserved_gpr, "Write")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "Write")
# then read/write...
if not self.isRegisterReserved(reserved_gpr, "ReadWrite"):
print(
"[DEBUG] next 'Read/Write' reserved register is %s "
"(ID: %d)\n" % (reserved_gpr, i)
)
self.reserveRegister(reserved_gpr, "ReadWrite")
self.accessReservedReg(i)
self.unreserveRegister(reserved_gpr, "ReadWrite")
def instrCheck(self):
read_only_regs = self.reservedRandomRegCheck("Write")
write_only_regs = self.reservedRandomRegCheck("Read")
cant_access_regs = self.reservedRandomRegCheck("ReadWrite")
print("[DEBUG INSTR-CHECK] read-only regs: ", read_only_regs)
print("[DEBUG INSTR-CHECK] write-only regs: ", write_only_regs)
print("[DEBUG INSTR-CHECK] no-access regs: ", cant_access_regs)
for i in range(20):
random_instr = self.pickWeighted(ALU_Int32_instructions)
instr = self.genInstruction(random_instr)
instr_info = self.queryInstructionRecord(instr)
dests = instr_info["Dests"]
for rname, rvalue in dests.items():
assert_not_in(
rname,
read_only_regs,
(
"OOPS! random '%s' instruction used reserved (read-only) register (%s) as dest operand???"
% (instr_info["Name"], rname)
),
)
assert_not_in(
rname,
cant_access_regs,
(
"OOPS! random '%s' instruction used reserved (read,write not allowed) register (%s) as dest operand???"
% (instr_info["Name"], rname)
),
)
srcs = instr_info["Srcs"]
for rname, rvalue in srcs.items():
assert_not_in(
rname,
write_only_regs,
(
"OOPS! random '%s' instruction used reserved (write-only) register (%s) as src operand???"
% (instr_info["Name"], rname)
),
)
assert_not_in(
rname,
cant_access_regs,
(
"OOPS! random '%s' instruction used reserved (read,write not allowed) register (%s) as src operand???"
% (instr_info["Name"], rname)
),
)
self.freeReservedRegs(read_only_regs, "Write")
self.freeReservedRegs(write_only_regs, "Read")
self.freeReservedRegs(cant_access_regs, "ReadWrite")
def accessReservedReg(self, reserved_reg_id):
rnd_set = []
for j in range(100):
rnd_set.append(self.getRandomGPR())
(r1, r2, r3, r4, r5) = self.getRandomRegisters(5, "GPR", "%d" % reserved_reg_id)
if r1 not in rnd_set:
rnd_set.append(r1)
if r2 not in rnd_set:
rnd_set.append(r2)
if r3 not in rnd_set:
rnd_set.append(r3)
if r4 not in rnd_set:
rnd_set.append(r4)
if r5 not in rnd_set:
rnd_set.append(r5)
print("\t[DEBUG] random gprs: ", rnd_set)
assert_not_in(
reserved_reg_id,
rnd_set,
("OOPS! RandomGPR returned reserved register (X%d) ???" % reserved_reg_id),
)
def reservedRandomRegCheck(self, access_to_deny):
(gpr1, gpr2, gpr3) = self.getRandomRegisters(3, "GPR", "31")
self.reserveRegister("x%d" % gpr1, access_to_deny)
self.accessReservedReg(gpr1)
self.reserveRegister("x%d" % gpr2, access_to_deny)
self.accessReservedReg(gpr2)
self.reserveRegister("x%d" % gpr3, access_to_deny)
self.accessReservedReg(gpr3)
return [gpr1, gpr2, gpr3]
def freeReservedRegs(self, rlist, denied_access):
for reg in rlist:
self.unreserveRegister("x%d" % reg, denied_access)
# Points to the MainSequence defined in this file
MainSequenceClass = MainSequence
# Using GenThreadRISCV by default, can be overriden with extended classes
GenThreadClass = GenThreadRISCV
# Using EnvRISCV by default, can be overriden with extended classes
EnvClass = EnvRISCV
| en | 0.778858 | # # Copyright (C) [2020] Futurewei Technologies, Inc. # # FORCE-RISCV is licensed under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES # OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO # NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. # See the License for the specific language governing permissions and # limitations under the License. # # import instruction_tree module from the current directory # check #1: reserve/unreserve successive GPR registers; should NOT be able # to access a reserved register via getRandomGPR functions. # check #2: reserve sets of registers with limited access rights; generate # random instructions, insuring that each generated instruction # honors the reserved registers. # lets try read 1st... # then write... # then read/write... # Points to the MainSequence defined in this file # Using GenThreadRISCV by default, can be overriden with extended classes # Using EnvRISCV by default, can be overriden with extended classes | 2.199697 | 2 |
Data Processing/Clustering/Using spark_and_numpy/bowl_npandspark.py | An5human/IPL-Stimulator | 1 | 6631806 | from pyspark.sql import SparkSession
from pyspark.ml.feature import StandardScaler
from pyspark.ml.linalg import Vectors
from pyspark.ml.feature import VectorAssembler
from pyspark.ml.clustering import KMeans
from pyspark.sql.functions import concat, col, lit
from pyspark.sql.window import *
from pyspark.sql.functions import row_number
from pyspark import SparkContext
import numpy as np
from copy import deepcopy
spark = SparkSession\
.builder\
.appName("Pythonnpandspark")\
.getOrCreate()
bowling = spark.read.csv('bowling_new.csv', header=True, inferSchema=True)
bowling.show(5)
name = bowling.rdd.map(lambda r: r.Bowler)
runs = bowling.rdd.map(lambda r: r.Runs)
overs = bowling.rdd.map(lambda r: r.Overs)
balls = bowling.rdd.map(lambda r: r.balls)
wickets = bowling.rdd.map(lambda r: r.Wickets)
ave = []
econ = []
sr = []
for i,j in zip(runs.collect(),wickets.collect()):
if(j == 0):
ave.append(float(0))
else:
ave.append(float(i/j))
for i,j in zip(runs.collect(),overs.collect()):
if(j == 0):
econ.append(float(0))
else:
econ.append(float(i/j))
for i,j in zip(balls.collect(),wickets.collect()):
if(j == 0):
sr.append(float(0))
else:
sr.append(float(i/j))
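# Derived bowling metrics (division by zero guarded above): ave = runs/wickets
# (bowling average), econ = runs/overs (economy rate), sr = balls/wickets
# (strike rate). These three columns become the clustering features below.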
ave = spark.sparkContext.parallelize(ave)
econ = spark.sparkContext.parallelize(econ)
sr = spark.sparkContext.parallelize(sr)
t =[name.map(lambda x:(x, )).toDF(['name']),ave.map(lambda x:(x, )).toDF(['ave']),econ.map(lambda x:(x, )).toDF(['econ']),sr.map(lambda x:(x, )).toDF(['sr'])]
t[0]=t[0].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[1]=t[1].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[2]=t[2].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[3]=t[3].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t1 = t[0].join(t[1],on = ["row_index"])
t2 = t1.join(t[2],on = ["row_index"])
t3 = t2.join(t[3],on = ["row_index"])
t4 = t3.drop("row_index")
t4.count()
vector = VectorAssembler(inputCols=['ave','econ','sr'], outputCol='cluster_features')
vector_fit = vector.transform(t4)
standardize = StandardScaler(inputCol='cluster_features', outputCol='standardized_features')
model = standardize.fit(vector_fit)
model_data = model.transform(vector_fit)
t5 = model_data.select('name','standardized_features')
def func(row):
return (row.name,row.standardized_features)
all_b = t5.rdd.map(func)
X = np.array(all_b.collect())[:,1]
y = spark.sparkContext.parallelize(all_b.takeSample(withReplacement = False,num=4,seed = 4))
j = 0
r = np.zeros((4,3))
for i in y.collect():
r[j] = np.array(i[1])
j = j+1
def euc_dist(x,y, axis_=1):
return (np.linalg.norm(np.array(x) - y, axis=axis_))
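# The loop below is a hand-rolled Lloyd's / k-means iteration with k = 4 centroids
# seeded from the random sample r above: (1) assign every standardized bowler
# vector in X to its nearest centroid, (2) recompute each centroid as the mean of
# its members, (3) repeat until the total centroid shift drops below 0.05.
# The hard-coded 2796 is assumed to match the row count reported by t4.count().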
cluster_number =(np.zeros((2796,1)))
error = 1
ee = [x for x in range(2796)]
while error >0.05:
for i in ee:
d = spark.sparkContext.parallelize(euc_dist(X[i], r,1))
cluster = np.argmin(np.array(d.collect()))
cluster_number[i] = cluster
old_r = deepcopy(r)
for i in range(4):
centroids = [X[j] for j in range(len(X)) if cluster_number[j] == i]
r[i] = np.mean(centroids, axis=0)
error = euc_dist(r, old_r,None)
print(error)
print(r)
| from pyspark.sql import SparkSession
from pyspark.ml.feature import StandardScaler
from pyspark.ml.linalg import Vectors
from pyspark.ml.feature import VectorAssembler
from pyspark.ml.clustering import KMeans
from pyspark.sql.functions import concat, col, lit
from pyspark.sql.window import *
from pyspark.sql.functions import row_number
from pyspark import SparkContext
import numpy as np
from copy import deepcopy
spark = SparkSession\
.builder\
.appName("Pythonnpandspark")\
.getOrCreate()
bowling = spark.read.csv('bowling_new.csv', header=True, inferSchema=True)
bowling.show(5)
name = bowling.rdd.map(lambda r: r.Bowler)
runs = bowling.rdd.map(lambda r: r.Runs)
overs = bowling.rdd.map(lambda r: r.Overs)
balls = bowling.rdd.map(lambda r: r.balls)
wickets = bowling.rdd.map(lambda r: r.Wickets)
ave = []
econ = []
sr = []
for i,j in zip(runs.collect(),wickets.collect()):
if(j == 0):
ave.append(float(0))
else:
ave.append(float(i/j))
for i,j in zip(runs.collect(),overs.collect()):
if(j == 0):
econ.append(float(0))
else:
econ.append(float(i/j))
for i,j in zip(balls.collect(),wickets.collect()):
if(j == 0):
sr.append(float(0))
else:
sr.append(float(i/j))
ave = spark.sparkContext.parallelize(ave)
econ = spark.sparkContext.parallelize(econ)
sr = spark.sparkContext.parallelize(sr)
t =[name.map(lambda x:(x, )).toDF(['name']),ave.map(lambda x:(x, )).toDF(['ave']),econ.map(lambda x:(x, )).toDF(['econ']),sr.map(lambda x:(x, )).toDF(['sr'])]
t[0]=t[0].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[1]=t[1].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[2]=t[2].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t[3]=t[3].withColumn('row_index', row_number().over(Window.orderBy(lit(1))))
t1 = t[0].join(t[1],on = ["row_index"])
t2 = t1.join(t[2],on = ["row_index"])
t3 = t2.join(t[3],on = ["row_index"])
t4 = t3.drop("row_index")
t4.count()
vector = VectorAssembler(inputCols=['ave','econ','sr'], outputCol='cluster_features')
vector_fit = vector.transform(t4)
standardize = StandardScaler(inputCol='cluster_features', outputCol='standardized_features')
model = standardize.fit(vector_fit)
model_data = model.transform(vector_fit)
t5 = model_data.select('name','standardized_features')
def func(row):
return (row.name,row.standardized_features)
all_b = t5.rdd.map(func)
X = np.array(all_b.collect())[:,1]
y = spark.sparkContext.parallelize(all_b.takeSample(withReplacement = False,num=4,seed = 4))
j = 0
r = np.zeros((4,3))
for i in y.collect():
r[j] = np.array(i[1])
j = j+1
def euc_dist(x,y, axis_=1):
return (np.linalg.norm(np.array(x) - y, axis=axis_))
cluster_number =(np.zeros((2796,1)))
error = 1
ee = [x for x in range(2796)]
while error >0.05:
for i in ee:
d = spark.sparkContext.parallelize(euc_dist(X[i], r,1))
cluster = np.argmin(np.array(d.collect()))
cluster_number[i] = cluster
old_r = deepcopy(r)
for i in range(4):
centroids = [X[j] for j in range(len(X)) if cluster_number[j] == i]
r[i] = np.mean(centroids, axis=0)
error = euc_dist(r, old_r,None)
print(error)
print(r)
| none | 1 | 2.789862 | 3 |
|
bowling.py | kitestring/BowlingDataCenter | 1 | 6631807 | import random
from tkinter import * # @unusedwildimport
from tkinter import ttk # @importredefinition
from tkinter import filedialog # @importredefinition
import functools
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
matplotlib.use("TkAgg")
from jsonAPI import JSON_Tools # @unresolvedimport
import os
from SQLiteAPI import BowlingDB # @unresolvedimport
from pathlib import Path
import time
import pandas as pd
import plotter # @unresolvedimport
from subprocess import check_output
class Window(Frame):
def __init__(self, db_filepath, master=None):
Frame.__init__(self, master)
self.default_fig_size = (10,4)
self.default_fig_dpi = 100
self.master.file = db_filepath
self.master = master
self.contentframe = ttk.Frame(self.master, padding=(5, 5, 5, 5))
self.bowlers = []
self.bowlers_strvar = StringVar(value=self.bowlers)
self.plottypes = []
self.plottypes_strvar = StringVar(value=self.plottypes)
self.statusmsg = StringVar()
self.season_league = []
self.season_league_strvar = StringVar(value=self.season_league)
self.content = StringVar()
self.init_window()
def init_window(self):
# Create db object
self.bowling_db = BowlingDB(self.master.file)
self.master.protocol("WM_DELETE_WINDOW", self._delete_window)
self.master.title("Bowling Data Center v2")
# Create and grid the outer content frame
self.contentframe.grid(column=0, row=0, sticky=(N,W,E,S))
self.master.grid_columnconfigure(0, weight=1)
self.master.grid_rowconfigure(0, weight=1)
# Initialize the canvas and grit it
self.update_canvas(plotter.starting_plot())
# Create the remaining widgets
seasonleague_lbox = Listbox(self.contentframe, listvariable=self.season_league_strvar, height=5, width=25,
exportselection=FALSE, selectmode=EXTENDED, name="seasonleague")
seasonleague_lbl = ttk.Label(self.contentframe, text='Season League', anchor=W)
seasonleague_btn = Button(self.contentframe, text='Add', command=functools.partial(self.add_season_league,
param=(seasonleague_lbox)))
seasonleague_lbl_entry = Entry(self.contentframe, textvariable=self.content, width=19)
plottype_lbox = Listbox(self.contentframe, listvariable=self.plottypes_strvar, height=5, width=25,
exportselection=FALSE, selectmode=EXTENDED, name="plotyype")
plot_lbl = ttk.Label(self.contentframe, text='Plot Type', anchor=W)
bowlers_lbox = Listbox(self.contentframe, listvariable=self.bowlers_strvar, height=5, width=25, exportselection=FALSE, name="bowler")
bowlers_lbl = ttk.Label(self.contentframe, text='Bowlers', anchor=W)
preview_btn = Button(self.contentframe, text='Preview', command=functools.partial(self.parce_selections, event='preview',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
report_btn = Button(self.contentframe, text='Report', command=functools.partial(self.parce_selections, event='report',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
load_btn = Button(self.contentframe, text='Load', command=functools.partial(self.parce_selections, event ='load',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
status = ttk.Label(self.contentframe, textvariable=self.statusmsg, anchor=W)
# Grid the remaining widgets
seasonleague_lbl.grid(column=1, row=0, sticky=(S,W))
seasonleague_lbox.grid(column=1, row=1, sticky=(S,E,W))
seasonleague_btn.grid(column=1, row=2, sticky=(S,W))
seasonleague_lbl_entry.grid(column=1, row=2, sticky=(E))
plot_lbl.grid(column=1, row=3, sticky=(S,W))
plottype_lbox.grid(column=1, row=4, sticky=(S,E,W))
bowlers_lbl.grid(column=1, row=5, sticky=(S,W))
bowlers_lbox.grid(column=1, row=6, sticky=(S,E,W))
preview_btn.grid(column=1, row=7, sticky=(S,W))
report_btn.grid(column=1, row=7, sticky=(S))
load_btn.grid(column=1, row=7, padx=15, sticky=(S,E))
status.grid(column=0, row=8, columnspan=3, sticky=(W,E))
self.contentframe.grid_columnconfigure(0, weight=1)
self.contentframe.grid_rowconfigure(0, weight=1)
## Query DB to initialize listboxes,
# query db to get unique bowlers & unique season leagues
# Note the query returns a dataframe which is sliced into a series and
# then converted to a list
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
# Then Colorize alternating listbox rows
self.refresh_listbox_values(Season_League, seasonleague_lbox, self.season_league_strvar)
self.refresh_listbox_values(Bowler, bowlers_lbox, self.bowlers_strvar)
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
self.alternate_listbox_rowcolors(bowlers_lbox, self.bowlers)
# define plot types listbox
# self.plottype_dbquery_columns defines which columns will need to be queried to generate the plots
plottypes = ['Average: Total', 'Average: Per-Day', 'Series: Scratch', 'Series: Handicap', 'Game Comparison', 'Average: Delta']
self.plottype_dbquery_columns = {'Average: Total': ['Days', 'Avg_After', 'Season_League'], 'Average: Per-Day': ['Days', 'Avg_Today', 'Season_League'], 'Series: Scratch': ['Days', 'SS', 'Season_League'],
'Series: Handicap': ['Days', 'HS', 'Season_League'], 'Game Comparison': ['Days', 'Gm1', 'Gm2', 'Gm3', 'Season_League'], 'Average: Delta': ['Days', 'Avg_Delta', 'Season_League']}
self.y_axis_columns = {'Average: Total': ['Avg_After'], 'Average: Per-Day': ['Avg_Today'], 'Series: Scratch': ['SS', 'Avg_After'],
'Series: Handicap': ['HS', 'Match_Points'], 'Game Comparison': ['Gm1', 'Gm2', 'Gm3', 'Avg_Before'], 'Average: Delta': ['Avg_Delta']}
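# Example: picking 'Series: Scratch' queries the Days, SS and Season_League columns
# and create_plot_preview() then passes the SS column to plotter.basic, presumably
# plotted against Days for each selected season league.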
self.refresh_listbox_values(plottypes, plottype_lbox, self.plottypes_strvar)
self.alternate_listbox_rowcolors(plottype_lbox, self.plottypes)
# seasonleagues = ['2017-18 Couples', '2018-19 <NAME>ramm', '2018-19 Couples']
# Season League Scroll bar
seasonleague_sb = ttk.Scrollbar(self.contentframe)
seasonleague_sb.grid(column=2, row=1, ipady=17, sticky=(S))
seasonleague_lbox.configure(yscrollcommand=seasonleague_sb.set)
seasonleague_sb.config(command=seasonleague_lbox.yview)
# Plot Type Scroll bar
plottype_sb = ttk.Scrollbar(self.contentframe)
plottype_sb.grid(column=2, row=4, ipady=17, sticky=(S))
plottype_lbox.configure(yscrollcommand=plottype_sb.set)
plottype_sb.config(command=plottype_lbox.yview)
# Bowler Scroll bar
bowlers_sb = ttk.Scrollbar(self.contentframe)
bowlers_sb.grid(column=2, row=6, ipady=17, sticky=(S))
bowlers_lbox.configure(yscrollcommand=bowlers_sb.set)
bowlers_sb.config(command=bowlers_lbox.yview)
# Set event bindings for when a plot selection is made,
plottype_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
bowlers_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
seasonleague_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
# Menu of the main frame
main_menu = Menu(self.master)
self.master.config(menu=main_menu)
# Create DataBase tab object
Database_tab = Menu(main_menu)
Load_tab = Menu(main_menu)
# create tab commands for Database tab
# Commands will be listed in the order they are added; the first added appears at the top
Database_tab.add_command(label='New', command=self.new_database)
Database_tab.add_command(label='Connect', command=functools.partial(self.connect_db, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
Database_tab.add_command(label='Current', command=self.display_current_db)
main_menu.add_cascade(label='Database', menu=Database_tab) # Add Database tab object to main_menu
# Create Load tab object
Load_tab = Menu(main_menu)
# create tab commands for Load tab
Load_tab.add_command(label='Load Match Points', command=self.load_excel)
Load_tab.add_command(label='Manual DB Corrections', command=self.manualDB_Corrections)
main_menu.add_cascade(label='Load', menu=Load_tab) # Add Load tab object to main_menu
# Starting status message
self.statusmsg.set(self.message_builder('None', ['None'], ['None']))
def _delete_window(self):
self.bowling_db.closeDBConnection()
try:
self.master.destroy()
except:
pass
def refresh_listbox_values(self, new_values, listbox, listbox_strvar):
# update listbox list and StringVar
if isinstance(new_values, str) == True:
new_values = [new_values]
# Create unique list from the new_values and the existing list items
if str(listbox) == '.!frame.plotyype':
new_values.extend(self.plottypes)
self.plottypes = sorted(list(set(new_values)))
listbox_strvar.set(value=self.plottypes)
elif str(listbox) == '.!frame.bowler':
new_values.extend(self.bowlers)
self.bowlers = sorted(list(set(new_values)))
listbox_strvar.set(value=self.bowlers)
elif str(listbox) == '.!frame.seasonleague':
new_values.extend(self.season_league)
self.season_league = sorted(list(set(new_values)))
listbox_strvar.set(value=self.season_league)
def alternate_listbox_rowcolors(self, lstbox, listbox_list):
# set the row background colors to alternate for the sake of readability
for i in range(0,len(listbox_list),2):
lstbox.itemconfigure(i, background='#f0f0ff')
def load_data(self, bowlers_lbox, seasonleague_lbox, seasonleague_selections):
# Checks if a single season league has been selected
if seasonleague_selections == ['None'] or len(seasonleague_selections) > 1:
self.statusmsg.set('Invalid selection: Must select a single season league to load data.\n\n')
return None
csv_file_path = self.open_file(dialogtitle='Select Bowler History Data File', ftype='*.csv', fdescription='Comma Separated Values', defalut_db_path=os.path.join('t:\\', 'TC', 'Documents', 'BowlingLeagues'))
if csv_file_path == None:
self.statusmsg.set("Action Aborted: csv file not selected.\n\n")
else:
# Load csv data into database
# Monitor query time
t0 = time.clock()
self.bowling_db.loadcsvfile(csv_file_path, seasonleague_selections[0])
self.bowling_db.CommitDB()
t1 = time.clock()
t_delta = t1 - t0
self.statusmsg.set('Finished loading:\t%s\nTime Elapsed:\t%s\n' % (csv_file_path, round(t_delta,1)))
## Update the GUI listboxes
# query db to get unique bowlers & unique season leagues
# Note the query returns a dataframe which is sliced into a series and
# then converted to a list
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
self.refresh_listbox_values(Season_League, seasonleague_lbox, self.season_league_strvar)
self.refresh_listbox_values(Bowler, bowlers_lbox, self.bowlers_strvar)
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
self.alternate_listbox_rowcolors(bowlers_lbox, self.bowlers)
def update_canvas(self, fig):
canvas = FigureCanvasTkAgg(fig, self.contentframe)
canvas.show()
canvas.get_tk_widget().grid(column=0, row=0, rowspan=8, sticky=(N,S,E,W))
def create_plot_preview(self, bowler, plot, seasonleagues):
# Make sure that the proper selections have been made, if not abort
if bowler == 'None' or (plot == ['None'] or len(plot) > 1) or seasonleagues == ['None']:
self.statusmsg.set('Invalid selection: Must select a bowler, at least 1 season league, and a single plot\n\n')
return None
else:
query_df = self.bowling_db.previewplotquery(columns=self.plottype_dbquery_columns[plot[0]], bowler=bowler, seasonleagues=seasonleagues)
# self.plottype_dbquery_columns = {'Average: Total': ['Days', 'Avg_After'], 'Average: Per-Day': ['Days', 'Avg_Today'], 'Series: Scratch': ['Days', 'SS'],
# 'Series: Handicap': ['Days', 'HCP', 'SS'], 'Game Comparison': ['Days', 'Gm1', 'Gm2', 'Gm3'], 'Average: Delta': ['Days', 'Avg_Delta']}
# pass the query df to the selected plotter method
if plot[0] == 'Average: Total':
fig = plotter.basic(query_df, seasonleagues, 'Avg_After', 'Overall Average', bowler)
self.update_canvas(fig)
elif plot[0] == 'Average: Per-Day':
fig = plotter.basic(query_df, seasonleagues, 'Avg_Today', "Per-Day Average", bowler)
self.update_canvas(fig)
elif plot[0] == 'Series: Scratch':
fig = plotter.basic(query_df, seasonleagues, 'SS', 'Scratch Series', bowler)
self.update_canvas(fig)
elif plot[0] == 'Series: Handicap':
fig = plotter.basic(query_df, seasonleagues, 'HS', 'Handicap Series', bowler)
self.update_canvas(fig)
elif plot[0] == 'Game Comparison':
fig = plotter.game(query_df, seasonleagues, 'Game Comparison', bowler)
self.update_canvas(fig)
elif plot[0] == 'Average: Delta':
fig = plotter.basic(query_df, seasonleagues, 'Avg_Delta', "Overall Average Change", bowler)
self.update_canvas(fig)
# Provide status message
self.statusmsg.set(self.message_builder(bowler=bowler, plots=plot, seasonleague=seasonleagues, message_appendage='\t\t\tSelected Plot Preview Created'))
def create_plot_report(self, bowler, plots, seasonleagues):
# Make sure that the proper selections have been made, if not abort
if bowler == 'None' or plots == ['None'] or seasonleagues == ['None']:
self.statusmsg.set('Invalid selection: Must select a bowler, at least 1 season league, and at least 1 plot\n\n')
return None
else:
# Get all data necessary to build all the selected plots
# create file paths for image file and json data file used to transfer
# plot information to python script which creates plot
# I use an independent python script to do this because I couldn't make it work otherwise
# I was getting strange behavior with matplotlib:
# closing the program when I ran - plt.savefig(plotimagefilepath, bbox_inches='tight')
# getting stuck and not continuing the script when I ran - plt.show()
query_df = self.bowling_db.plotreportquery(bowler, seasonleagues)
jsonreportdata = os.path.join(utils_directory, 'Report_data.txt')
tempfigfilepath = os.path.join(utils_directory, 'TempFig.png')
# Prompt user for pdf file path that plot will be built into
pdffilepath = self.file_save(dialogtitle='Save Bowling Report to pdf', ftype='pdf', fdescription='pdf Files',
defalut_db_path=os.path.join('T:\\', 'TC', 'Documents', 'BowlingLeagues'))
# if user provided a valid pdf file path, then create the report
if pdffilepath != None:
JSON_Tools.dump_Data_To_File(jsonreportdata, df=query_df.to_json(orient='records'), seasonleagues=seasonleagues, y_axis_columns=self.y_axis_columns,
bowler=bowler, plots=plots, plotimagefilepath=tempfigfilepath, pdffilepath=pdffilepath)
stdout = check_output('python reportbuilder.py {r}'.format(r=jsonreportdata), shell=True, universal_newlines=True)
print(stdout)
# If no pdf file path is provided (user hits cancel) no report will be made
else:
self.statusmsg.set("Action Aborted, not reported created.\n\n")
def add_season_league(self, param):
seasonleague_lbox = param
new_season_league_user_entry = self.content.get()
# Check if entry is already there and that it's not an empty string
if not new_season_league_user_entry in self.season_league and new_season_league_user_entry != '':
self.statusmsg.set(value='New season league added: {sl}\n\n'.format(sl=new_season_league_user_entry))
# Adds new value to list box then clears the entry
self.refresh_listbox_values(new_season_league_user_entry, seasonleague_lbox, self.season_league_strvar)
self.content.set(value='')
# color codes the alternating rows
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
else:
self.statusmsg.set(value='Invalid Season League Entry "{sl}". Value either already exists or is blank.\n\n'.format(sl=new_season_league_user_entry))
def parce_selections(self, event, param):
# Gets the list box selections and sends them to the
# call back depending on the event
# Get the currently selected index(es) for each list box
plot_idxs_raw = param['plottype_lbox'].curselection()
bowler_idxs_raw = param['bowlers_lbox'].curselection()
seasonleague_indx_raw = param['seasonleague_lbox'].curselection()
# Create a list from the indexes determined above
if len(plot_idxs_raw)!=0:
plot_idxs = [int(i) for i in plot_idxs_raw]
plots = [self.plottypes[i] for i in plot_idxs]
else:
plots = ['None']
if len(bowler_idxs_raw)!=0:
bowler = self.bowlers[int(bowler_idxs_raw[0])]
else:
bowler = 'None'
if len(seasonleague_indx_raw)!=0:
seasonleagues_idxs = [int(i) for i in seasonleague_indx_raw]
seasonleagues = [self.season_league[i] for i in seasonleagues_idxs]
else:
seasonleagues = ['None']
# Depending on the widget, call the correct method
# call back selector
if event == 'preview': # preview button click
self.create_plot_preview(bowler, plots, seasonleagues)
elif event == 'report': # report button click
self.create_plot_report(bowler, plots, seasonleagues)
# self.statusmsg.set(self.message_builder(bowler=bowler, plots=plots, seasonleague=seasonleagues, message_appendage='\t\t\tSelected Plot Report Created'))
elif event == 'load': # load button click
self.load_data(param['bowlers_lbox'], param['seasonleague_lbox'], seasonleagues)
elif str(event.widget) == '.!frame.plotyype': # plottype listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
elif str(event.widget) == '.!frame.bowler': # bowler listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
elif str(event.widget) == '.!frame.seasonleague': # season league listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
def message_builder(self, bowler, plots, seasonleague, message_appendage=''):
plots_string = ", ".join(plots)
seasonleague_string = ", ".join(seasonleague)
return 'Bowler Selected: {b}\nPlots Selected: {p}\nSeason Leagues Selected: {s}{m}'.format(b=bowler, p=plots_string, s=seasonleague_string, m=message_appendage)
def connect_db(self, param):
self.statusmsg.set("Define Database Connection\n\n")
temp_db_file = self.open_file()
if temp_db_file == None:
self.statusmsg.set("Action Aborted: Database connection not established.\n\n")
else:
self.statusmsg.set("New Database connection: {f}\n\n".format(f=temp_db_file))
# Update json file with new db location
self.master.file = temp_db_file
JSON_Tools.dump_Data_To_File(jsonfilepath, db_filepath = self.master.file)
# disconnect from current db then connect to selected db
self.bowling_db.closeDBConnection()
self.bowling_db = None
self.bowling_db = BowlingDB(self.master.file)
# Clear bowling & season league list boxes
self.season_league = []
self.season_league_strvar.set(value=self.season_league)
self.bowlers = []
self.bowlers_strvar.set(value=self.bowlers)
# query db to populate and refresh bowling & season league list boxes
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
self.refresh_listbox_values(Season_League, param['seasonleague_lbox'], self.season_league_strvar)
self.refresh_listbox_values(Bowler, param['bowlers_lbox'], self.bowlers_strvar)
self.alternate_listbox_rowcolors(param['seasonleague_lbox'], self.season_league)
self.alternate_listbox_rowcolors(param['bowlers_lbox'], self.bowlers)
def new_database(self):
self.statusmsg.set("Define New Database Connection\n\n")
temp_db_file = self.file_save()
if temp_db_file == None:
self.statusmsg.set("Action Aborted: Database not created.\n\n")
else:
self.statusmsg.set("New Database created: {f}\n\n".format(f=temp_db_file))
self.master.file = temp_db_file
# Update json file with new db location
self.master.file = temp_db_file
JSON_Tools.dump_Data_To_File(jsonfilepath, db_filepath = self.master.file)
# disconnect from current db then connect to selected db
self.bowling_db.closeDBConnection()
self.bowling_db = None
self.bowling_db = BowlingDB(self.master.file)
# Clear bowling & season league list boxes
self.season_league = []
self.season_league_strvar.set(value=self.season_league)
self.bowlers = []
self.bowlers_strvar.set(value=self.bowlers)
def file_save(self, dialogtitle='Create New Database', ftype='db', fdescription='Database Files', defalut_db_path=os.path.join('C:\\', 'ProgramData', 'BowlingData')):
f = filedialog.asksaveasfilename(initialdir = defalut_db_path,
title = dialogtitle, filetypes=((fdescription, '*.' + ftype),))
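        # Return None if the dialog was cancelled; otherwise ensure the chosen
        # path ends with the expected extension before returning it.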
if f == None or f == '':
return None
elif f.split('.')[-1] == ftype:
return f
elif f.split('.')[-1] != ftype:
return f + '.' + ftype
def open_file(self, dialogtitle='Select Database Connection', ftype='*.db', fdescription='Database Files', defalut_db_path=os.path.join('C:\\', 'ProgramData', 'BowlingData')):
f = filedialog.askopenfilename(initialdir = defalut_db_path,
title = dialogtitle, filetypes=((fdescription, ftype),))
if f == None or f == '':
return None
else:
return f
def display_current_db(self):
try:
self.statusmsg.set('Current Database Connection: {db}\n\n'.format(db=self.master.file))
except AttributeError:
self.statusmsg.set('No Database Connection Established\n\n')
def load_excel(self):
f = filedialog.askopenfilename(initialdir=os.path.join('T:\\', 'TC', 'Documents', 'BowlingLeagues') ,
title = "Select Excel File with Match Point Data", filetypes=(('Excel Files', '*.xlsx'), ('Excel Macro Files', '*.xlsm'),))
if f == None or f == '':
            self.statusmsg.set('No Excel File Selected. Action Aborted\n\n')
return None
# Extracted data from excel file
self.bowling_db.loadexcelfile(f)
self.bowling_db.CommitDB()
self.statusmsg.set('Excel File Loaded\n\n')
def manualDB_Corrections(self):
self.bowling_db.manualDB_Corrections()
self.bowling_db.CommitDB()
self.statusmsg.set('Manual Database Corrections Implemented\n\n')
if __name__ == '__main__':
# initialize working directory & json file, if doesn't exist then create it
# JSON file contains db file path. If not found the default path will be used
utils_directory=os.path.join('C:\\', 'ProgramData', 'BowlingData')
jsonfilepath = os.path.join(utils_directory, 'bowlinginstancedata.txt')
# JSON_Tools = JSON_Tools()
if os.path.isdir(utils_directory) == False:
os.makedirs(utils_directory)
if os.path.exists(jsonfilepath) == False:
db_filepath = os.path.join(utils_directory, 'bowling.db')
JSON_Tools().dump_Data_To_File(jsonfilepath, db_filepath = os.path.join(utils_directory, 'bowling.db'))
else:
# db_filepath = JSON_Tools.Load_Data(jsonfilepath)['db_filepath']
db_filepath = JSON_Tools().Load_Data(jsonfilepath)['db_filepath']
# Create GUI
root = Tk()
bowling_app = Window(db_filepath, root)
root.mainloop() | import random
from tkinter import * # @unusedwildimport
from tkinter import ttk # @importredefinition
from tkinter import filedialog # @importredefinition
import functools
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
matplotlib.use("TkAgg")
from jsonAPI import JSON_Tools # @unresolvedimport
import os
from SQLiteAPI import BowlingDB # @unresolvedimport
from pathlib import Path
import time
import pandas as pd
import plotter # @unresolvedimport
from subprocess import check_output
class Window(Frame):
def __init__(self, db_filepath, master=None):
Frame.__init__(self, master)
self.default_fig_size = (10,4)
self.default_fig_dpi = 100
self.master.file = db_filepath
self.master = master
self.contentframe = ttk.Frame(self.master, padding=(5, 5, 5, 5))
self.bowlers = []
self.bowlers_strvar = StringVar(value=self.bowlers)
self.plottypes = []
self.plottypes_strvar = StringVar(value=self.plottypes)
self.statusmsg = StringVar()
self.season_league = []
self.season_league_strvar = StringVar(value=self.season_league)
self.content = StringVar()
self.init_window()
def init_window(self):
# Create db object
self.bowling_db = BowlingDB(self.master.file)
self.master.protocol("WM_DELETE_WINDOW", self._delete_window)
self.master.title("Bowling Data Center v2")
# Create and grid the outer content frame
self.contentframe.grid(column=0, row=0, sticky=(N,W,E,S))
self.master.grid_columnconfigure(0, weight=1)
self.master.grid_rowconfigure(0, weight=1)
        # Initialize the canvas and grid it
self.update_canvas(plotter.starting_plot())
# Create the remaining widgets
seasonleague_lbox = Listbox(self.contentframe, listvariable=self.season_league_strvar, height=5, width=25,
exportselection=FALSE, selectmode=EXTENDED, name="seasonleague")
seasonleague_lbl = ttk.Label(self.contentframe, text='Season League', anchor=W)
seasonleague_btn = Button(self.contentframe, text='Add', command=functools.partial(self.add_season_league,
param=(seasonleague_lbox)))
seasonleague_lbl_entry = Entry(self.contentframe, textvariable=self.content, width=19)
plottype_lbox = Listbox(self.contentframe, listvariable=self.plottypes_strvar, height=5, width=25,
exportselection=FALSE, selectmode=EXTENDED, name="plotyype")
plot_lbl = ttk.Label(self.contentframe, text='Plot Type', anchor=W)
bowlers_lbox = Listbox(self.contentframe, listvariable=self.bowlers_strvar, height=5, width=25, exportselection=FALSE, name="bowler")
bowlers_lbl = ttk.Label(self.contentframe, text='Bowlers', anchor=W)
preview_btn = Button(self.contentframe, text='Preview', command=functools.partial(self.parce_selections, event='preview',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
report_btn = Button(self.contentframe, text='Report', command=functools.partial(self.parce_selections, event='report',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
load_btn = Button(self.contentframe, text='Load', command=functools.partial(self.parce_selections, event ='load',
param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
status = ttk.Label(self.contentframe, textvariable=self.statusmsg, anchor=W)
# Grid the remaining widgets
seasonleague_lbl.grid(column=1, row=0, sticky=(S,W))
seasonleague_lbox.grid(column=1, row=1, sticky=(S,E,W))
seasonleague_btn.grid(column=1, row=2, sticky=(S,W))
seasonleague_lbl_entry.grid(column=1, row=2, sticky=(E))
plot_lbl.grid(column=1, row=3, sticky=(S,W))
plottype_lbox.grid(column=1, row=4, sticky=(S,E,W))
bowlers_lbl.grid(column=1, row=5, sticky=(S,W))
bowlers_lbox.grid(column=1, row=6, sticky=(S,E,W))
preview_btn.grid(column=1, row=7, sticky=(S,W))
report_btn.grid(column=1, row=7, sticky=(S))
load_btn.grid(column=1, row=7, padx=15, sticky=(S,E))
status.grid(column=0, row=8, columnspan=3, sticky=(W,E))
self.contentframe.grid_columnconfigure(0, weight=1)
self.contentframe.grid_rowconfigure(0, weight=1)
## Query DB to initialize listboxes,
        # query db to get unique bowlers & unique season leagues
# Note the query returns a dataframe which is sliced into a series and
# then converted to a list
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
# Then Colorize alternating listbox rows
self.refresh_listbox_values(Season_League, seasonleague_lbox, self.season_league_strvar)
self.refresh_listbox_values(Bowler, bowlers_lbox, self.bowlers_strvar)
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
self.alternate_listbox_rowcolors(bowlers_lbox, self.bowlers)
# define plot types listbox
# self.plottype_dbquery_columns defines which columns will need to be queried to generate the plots
plottypes = ['Average: Total', 'Average: Per-Day', 'Series: Scratch', 'Series: Handicap', 'Game Comparison', 'Average: Delta']
self.plottype_dbquery_columns = {'Average: Total': ['Days', 'Avg_After', 'Season_League'], 'Average: Per-Day': ['Days', 'Avg_Today', 'Season_League'], 'Series: Scratch': ['Days', 'SS', 'Season_League'],
'Series: Handicap': ['Days', 'HS', 'Season_League'], 'Game Comparison': ['Days', 'Gm1', 'Gm2', 'Gm3', 'Season_League'], 'Average: Delta': ['Days', 'Avg_Delta', 'Season_League']}
self.y_axis_columns = {'Average: Total': ['Avg_After'], 'Average: Per-Day': ['Avg_Today'], 'Series: Scratch': ['SS', 'Avg_After'],
'Series: Handicap': ['HS', 'Match_Points'], 'Game Comparison': ['Gm1', 'Gm2', 'Gm3', 'Avg_Before'], 'Average: Delta': ['Avg_Delta']}
self.refresh_listbox_values(plottypes, plottype_lbox, self.plottypes_strvar)
self.alternate_listbox_rowcolors(plottype_lbox, self.plottypes)
# seasonleagues = ['2017-18 Couples', '2018-19 <NAME>ramm', '2018-19 Couples']
# Season League Scroll bar
seasonleague_sb = ttk.Scrollbar(self.contentframe)
seasonleague_sb.grid(column=2, row=1, ipady=17, sticky=(S))
seasonleague_lbox.configure(yscrollcommand=seasonleague_sb.set)
seasonleague_sb.config(command=seasonleague_lbox.yview)
# Plot Type Scroll bar
plottype_sb = ttk.Scrollbar(self.contentframe)
plottype_sb.grid(column=2, row=4, ipady=17, sticky=(S))
plottype_lbox.configure(yscrollcommand=plottype_sb.set)
plottype_sb.config(command=plottype_lbox.yview)
# Bowler Scroll bar
bowlers_sb = ttk.Scrollbar(self.contentframe)
bowlers_sb.grid(column=2, row=6, ipady=17, sticky=(S))
bowlers_lbox.configure(yscrollcommand=bowlers_sb.set)
bowlers_sb.config(command=bowlers_lbox.yview)
# Set event bindings for when a plot selection is made,
plottype_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
bowlers_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
seasonleague_lbox.bind('<<ListboxSelect>>',
functools.partial(self.parce_selections, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
# Menu of the main frame
main_menu = Menu(self.master)
self.master.config(menu=main_menu)
# Create DataBase tab object
Database_tab = Menu(main_menu)
Load_tab = Menu(main_menu)
# create tab commands for Database tab
        # Commands will be listed in the order they are added; the first added appears at the top
Database_tab.add_command(label='New', command=self.new_database)
Database_tab.add_command(label='Connect', command=functools.partial(self.connect_db, param=({'plottype_lbox': plottype_lbox,
'bowlers_lbox': bowlers_lbox,
'seasonleague_lbox':seasonleague_lbox})))
Database_tab.add_command(label='Current', command=self.display_current_db)
main_menu.add_cascade(label='Database', menu=Database_tab) # Add Database tab object to main_menu
# Create Load tab object
Load_tab = Menu(main_menu)
# create tab commands for Load tab
Load_tab.add_command(label='Load Match Points', command=self.load_excel)
Load_tab.add_command(label='Manual DB Corrections', command=self.manualDB_Corrections)
main_menu.add_cascade(label='Load', menu=Load_tab) # Add Load tab object to main_menu
# Starting status message
self.statusmsg.set(self.message_builder('None', ['None'], ['None']))
def _delete_window(self):
self.bowling_db.closeDBConnection()
try:
self.master.destroy()
except:
pass
def refresh_listbox_values(self, new_values, listbox, listbox_strvar):
# update listbox list and StringVar
if isinstance(new_values, str) == True:
new_values = [new_values]
# Create unique list from the new_values and the existing list items
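        # Match on the Tk widget path name to decide which backing list to update.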
if str(listbox) == '.!frame.plotyype':
new_values.extend(self.plottypes)
self.plottypes = sorted(list(set(new_values)))
listbox_strvar.set(value=self.plottypes)
elif str(listbox) == '.!frame.bowler':
new_values.extend(self.bowlers)
self.bowlers = sorted(list(set(new_values)))
listbox_strvar.set(value=self.bowlers)
elif str(listbox) == '.!frame.seasonleague':
new_values.extend(self.season_league)
self.season_league = sorted(list(set(new_values)))
listbox_strvar.set(value=self.season_league)
def alternate_listbox_rowcolors(self, lstbox, listbox_list):
# set the row background colors to alternate for the sake of readability
for i in range(0,len(listbox_list),2):
lstbox.itemconfigure(i, background='#f0f0ff')
def load_data(self, bowlers_lbox, seasonleague_lbox, seasonleague_selections):
# Checks if a single season league has been selected
if seasonleague_selections == ['None'] or len(seasonleague_selections) > 1:
self.statusmsg.set('Invalid selection: Must select a single season league to load data.\n\n')
return None
csv_file_path = self.open_file(dialogtitle='Select Bowler History Data File', ftype='*.csv', fdescription='Comma Separated Values', defalut_db_path=os.path.join('t:\\', 'TC', 'Documents', 'BowlingLeagues'))
if csv_file_path == None:
self.statusmsg.set("Action Aborted: csv file not selected.\n\n")
else:
# Load csv data into database
# Monitor query time
t0 = time.clock()
self.bowling_db.loadcsvfile(csv_file_path, seasonleague_selections[0])
self.bowling_db.CommitDB()
t1 = time.clock()
t_delta = t1 - t0
self.statusmsg.set('Finished loading:\t%s\nTime Elapsed:\t%s\n' % (csv_file_path, round(t_delta,1)))
## Update the GUI listboxes
            # query db to get unique bowlers & unique season leagues
# Note the query returns a dataframe which is sliced into a series and
# then converted to a list
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
self.refresh_listbox_values(Season_League, seasonleague_lbox, self.season_league_strvar)
self.refresh_listbox_values(Bowler, bowlers_lbox, self.bowlers_strvar)
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
self.alternate_listbox_rowcolors(bowlers_lbox, self.bowlers)
def update_canvas(self, fig):
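        # Render the given matplotlib figure on a fresh Tk canvas and grid it
        # over the plot area (column 0, rows 0-7).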
canvas = FigureCanvasTkAgg(fig, self.contentframe)
canvas.show()
canvas.get_tk_widget().grid(column=0, row=0, rowspan=8, sticky=(N,S,E,W))
def create_plot_preview(self, bowler, plot, seasonleagues):
# Make sure that the proper selections have been made, if not abort
if bowler == 'None' or (plot == ['None'] or len(plot) > 1) or seasonleagues == ['None']:
self.statusmsg.set('Invalid selection: Must select a bowler, at least 1 season league, and a single plot\n\n')
return None
else:
query_df = self.bowling_db.previewplotquery(columns=self.plottype_dbquery_columns[plot[0]], bowler=bowler, seasonleagues=seasonleagues)
# self.plottype_dbquery_columns = {'Average: Total': ['Days', 'Avg_After'], 'Average: Per-Day': ['Days', 'Avg_Today'], 'Series: Scratch': ['Days', 'SS'],
# 'Series: Handicap': ['Days', 'HCP', 'SS'], 'Game Comparison': ['Days', 'Gm1', 'Gm2', 'Gm3'], 'Average: Delta': ['Days', 'Avg_Delta']}
# pass the query df to the selected plotter method
if plot[0] == 'Average: Total':
fig = plotter.basic(query_df, seasonleagues, 'Avg_After', 'Overall Average', bowler)
self.update_canvas(fig)
elif plot[0] == 'Average: Per-Day':
fig = plotter.basic(query_df, seasonleagues, 'Avg_Today', "Per-Day Average", bowler)
self.update_canvas(fig)
elif plot[0] == 'Series: Scratch':
fig = plotter.basic(query_df, seasonleagues, 'SS', 'Scratch Series', bowler)
self.update_canvas(fig)
elif plot[0] == 'Series: Handicap':
fig = plotter.basic(query_df, seasonleagues, 'HS', 'Handicap Series', bowler)
self.update_canvas(fig)
elif plot[0] == 'Game Comparison':
fig = plotter.game(query_df, seasonleagues, 'Game Comparison', bowler)
self.update_canvas(fig)
elif plot[0] == 'Average: Delta':
fig = plotter.basic(query_df, seasonleagues, 'Avg_Delta', "Overall Average Change", bowler)
self.update_canvas(fig)
# Provide status message
self.statusmsg.set(self.message_builder(bowler=bowler, plots=plot, seasonleague=seasonleagues, message_appendage='\t\t\tSelected Plot Preview Created'))
def create_plot_report(self, bowler, plots, seasonleagues):
# Make sure that the proper selections have been made, if not abort
if bowler == 'None' or plots == ['None'] or seasonleagues == ['None']:
self.statusmsg.set('Invalid selection: Must select a bowler, at least 1 season league, and at least 1 plot\n\n')
return None
else:
# Get all data necessary to build all the selected plots
            # create file paths for the image file and the json data file used to transfer
            # plot information to the python script which creates the plot
            # I use an independent python script to do this because I couldn't make it work otherwise
            # I was getting strange behavior with matplotlib:
            # closing the program when I ran - plt.savefig(plotimagefilepath, bbox_inches='tight')
            # getting stuck and not continuing the script when I ran - plt.show()
query_df = self.bowling_db.plotreportquery(bowler, seasonleagues)
jsonreportdata = os.path.join(utils_directory, 'Report_data.txt')
tempfigfilepath = os.path.join(utils_directory, 'TempFig.png')
# Prompt user for pdf file path that plot will be built into
pdffilepath = self.file_save(dialogtitle='Save Bowling Report to pdf', ftype='pdf', fdescription='pdf Files',
defalut_db_path=os.path.join('T:\\', 'TC', 'Documents', 'BowlingLeagues'))
# if user provided a valid pdf file path, then create the report
if pdffilepath != None:
JSON_Tools.dump_Data_To_File(jsonreportdata, df=query_df.to_json(orient='records'), seasonleagues=seasonleagues, y_axis_columns=self.y_axis_columns,
bowler=bowler, plots=plots, plotimagefilepath=tempfigfilepath, pdffilepath=pdffilepath)
stdout = check_output('python reportbuilder.py {r}'.format(r=jsonreportdata), shell=True, universal_newlines=True)
print(stdout)
# If no pdf file path is provided (user hits cancel) no report will be made
else:
self.statusmsg.set("Action Aborted, not reported created.\n\n")
def add_season_league(self, param):
seasonleague_lbox = param
new_season_league_user_entry = self.content.get()
# Check if entry is already there and that it's not an empty string
if not new_season_league_user_entry in self.season_league and new_season_league_user_entry != '':
self.statusmsg.set(value='New season league added: {sl}\n\n'.format(sl=new_season_league_user_entry))
# Adds new value to list box then clears the entry
self.refresh_listbox_values(new_season_league_user_entry, seasonleague_lbox, self.season_league_strvar)
self.content.set(value='')
# color codes the alternating rows
self.alternate_listbox_rowcolors(seasonleague_lbox, self.season_league)
else:
self.statusmsg.set(value='Invalid Season League Entry "{sl}". Value either already exists or is blank.\n\n'.format(sl=new_season_league_user_entry))
def parce_selections(self, event, param):
# Gets the list box selections and sends them to the
# call back depending on the event
# Get the currently selected index(es) for each list box
plot_idxs_raw = param['plottype_lbox'].curselection()
bowler_idxs_raw = param['bowlers_lbox'].curselection()
seasonleague_indx_raw = param['seasonleague_lbox'].curselection()
# Create a list from the indexes determined above
if len(plot_idxs_raw)!=0:
plot_idxs = [int(i) for i in plot_idxs_raw]
plots = [self.plottypes[i] for i in plot_idxs]
else:
plots = ['None']
if len(bowler_idxs_raw)!=0:
bowler = self.bowlers[int(bowler_idxs_raw[0])]
else:
bowler = 'None'
if len(seasonleague_indx_raw)!=0:
seasonleagues_idxs = [int(i) for i in seasonleague_indx_raw]
seasonleagues = [self.season_league[i] for i in seasonleagues_idxs]
else:
seasonleagues = ['None']
# Depending on the widget, call the correct method
# call back selector
if event == 'preview': # preview button click
self.create_plot_preview(bowler, plots, seasonleagues)
elif event == 'report': # report button click
self.create_plot_report(bowler, plots, seasonleagues)
# self.statusmsg.set(self.message_builder(bowler=bowler, plots=plots, seasonleague=seasonleagues, message_appendage='\t\t\tSelected Plot Report Created'))
elif event == 'load': # load button click
self.load_data(param['bowlers_lbox'], param['seasonleague_lbox'], seasonleagues)
elif str(event.widget) == '.!frame.plotyype': # plottype listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
elif str(event.widget) == '.!frame.bowler': # bowler listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
elif str(event.widget) == '.!frame.seasonleague': # season league listbox selection
self.statusmsg.set(self.message_builder(bowler, plots, seasonleagues))
def message_builder(self, bowler, plots, seasonleague, message_appendage=''):
plots_string = ", ".join(plots)
seasonleague_string = ", ".join(seasonleague)
return 'Bowler Selected: {b}\nPlots Selected: {p}\nSeason Leagues Selected: {s}{m}'.format(b=bowler, p=plots_string, s=seasonleague_string, m=message_appendage)
def connect_db(self, param):
self.statusmsg.set("Define Database Connection\n\n")
temp_db_file = self.open_file()
if temp_db_file == None:
self.statusmsg.set("Action Aborted: Database connection not established.\n\n")
else:
self.statusmsg.set("New Database connection: {f}\n\n".format(f=temp_db_file))
# Update json file with new db location
self.master.file = temp_db_file
JSON_Tools.dump_Data_To_File(jsonfilepath, db_filepath = self.master.file)
# disconnect from current db then connect to selected db
self.bowling_db.closeDBConnection()
self.bowling_db = None
self.bowling_db = BowlingDB(self.master.file)
# Clear bowling & season league list boxes
self.season_league = []
self.season_league_strvar.set(value=self.season_league)
self.bowlers = []
self.bowlers_strvar.set(value=self.bowlers)
# query db to populate and refresh bowling & season league list boxes
Bowler = self.bowling_db.getuniquevalues(colummn='Bowler', table='bowling')['Bowler'].tolist()
Season_League = self.bowling_db.getuniquevalues(colummn='Season_League', table='bowling')['Season_League'].tolist()
# populate and refresh the Season_League & Bowler listboxes
self.refresh_listbox_values(Season_League, param['seasonleague_lbox'], self.season_league_strvar)
self.refresh_listbox_values(Bowler, param['bowlers_lbox'], self.bowlers_strvar)
self.alternate_listbox_rowcolors(param['seasonleague_lbox'], self.season_league)
self.alternate_listbox_rowcolors(param['bowlers_lbox'], self.bowlers)
def new_database(self):
self.statusmsg.set("Define New Database Connection\n\n")
temp_db_file = self.file_save()
if temp_db_file == None:
self.statusmsg.set("Action Aborted: Database not created.\n\n")
else:
self.statusmsg.set("New Database created: {f}\n\n".format(f=temp_db_file))
self.master.file = temp_db_file
# Update json file with new db location
self.master.file = temp_db_file
JSON_Tools.dump_Data_To_File(jsonfilepath, db_filepath = self.master.file)
# disconnect from current db then connect to selected db
self.bowling_db.closeDBConnection()
self.bowling_db = None
self.bowling_db = BowlingDB(self.master.file)
# Clear bowling & season league list boxes
self.season_league = []
self.season_league_strvar.set(value=self.season_league)
self.bowlers = []
self.bowlers_strvar.set(value=self.bowlers)
def file_save(self, dialogtitle='Create New Database', ftype='db', fdescription='Database Files', defalut_db_path=os.path.join('C:\\', 'ProgramData', 'BowlingData')):
f = filedialog.asksaveasfilename(initialdir = defalut_db_path,
title = dialogtitle, filetypes=((fdescription, '*.' + ftype),))
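        # Return None if the dialog was cancelled; otherwise ensure the chosen
        # path ends with the expected extension before returning it.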
if f == None or f == '':
return None
elif f.split('.')[-1] == ftype:
return f
elif f.split('.')[-1] != ftype:
return f + '.' + ftype
def open_file(self, dialogtitle='Select Database Connection', ftype='*.db', fdescription='Database Files', defalut_db_path=os.path.join('C:\\', 'ProgramData', 'BowlingData')):
f = filedialog.askopenfilename(initialdir = defalut_db_path,
title = dialogtitle, filetypes=((fdescription, ftype),))
if f == None or f == '':
return None
else:
return f
def display_current_db(self):
try:
self.statusmsg.set('Current Database Connection: {db}\n\n'.format(db=self.master.file))
except AttributeError:
self.statusmsg.set('No Database Connection Established\n\n')
def load_excel(self):
f = filedialog.askopenfilename(initialdir=os.path.join('T:\\', 'TC', 'Documents', 'BowlingLeagues') ,
title = "Select Excel File with Match Point Data", filetypes=(('Excel Files', '*.xlsx'), ('Excel Macro Files', '*.xlsm'),))
if f == None or f == '':
            self.statusmsg.set('No Excel File Selected. Action Aborted\n\n')
return None
# Extracted data from excel file
self.bowling_db.loadexcelfile(f)
self.bowling_db.CommitDB()
self.statusmsg.set('Excel File Loaded\n\n')
def manualDB_Corrections(self):
self.bowling_db.manualDB_Corrections()
self.bowling_db.CommitDB()
self.statusmsg.set('Manual Database Corrections Implemented\n\n')
if __name__ == '__main__':
# initialize working directory & json file, if doesn't exist then create it
# JSON file contains db file path. If not found the default path will be used
utils_directory=os.path.join('C:\\', 'ProgramData', 'BowlingData')
jsonfilepath = os.path.join(utils_directory, 'bowlinginstancedata.txt')
# JSON_Tools = JSON_Tools()
if os.path.isdir(utils_directory) == False:
os.makedirs(utils_directory)
if os.path.exists(jsonfilepath) == False:
db_filepath = os.path.join(utils_directory, 'bowling.db')
JSON_Tools().dump_Data_To_File(jsonfilepath, db_filepath = os.path.join(utils_directory, 'bowling.db'))
else:
# db_filepath = JSON_Tools.Load_Data(jsonfilepath)['db_filepath']
db_filepath = JSON_Tools().Load_Data(jsonfilepath)['db_filepath']
# Create GUI
root = Tk()
bowling_app = Window(db_filepath, root)
root.mainloop() | en | 0.759695 | # @unusedwildimport # @importredefinition # @importredefinition # @unresolvedimport # @unresolvedimport # @unresolvedimport # Create db object # Create and grid the outer content frame # Initialize the canvas and grit it # Create the remaining widgets # Grid the remaining widgets ## Query DB to initialize listboxes, # query db to get unique bowlers & unique season leages # Note the query returns a dataframe which is sliced into a series and # then converted to a list # populate and refresh the Season_League & Bowler listboxes # Then Colorize alternating listbox rows # define plot types listbox # self.plottype_dbquery_columns defines which columns will need to be queried to generate the plots # seasonleagues = ['2017-18 Couples', '2018-19 <NAME>ramm', '2018-19 Couples'] # Season League Scroll bar # Plot Type Scroll bar # Bowler Scroll bar # Set event bindings for when a plot selection is made, # Menu of the main frame # Create DataBase tab object # create tab commands for Database tab # Commands will be list in the order they are added from 1st is top # Add Database tab object to main_menu # Create Load tab object # create tab commands for Load tab # Add Load tab object to main_menu # Starting status message # update listbox list and StringVar # Create unique list from the new_values and the existing list items # set the row background colors to alternate for the sake of readability # Checks if a single season league has been selected # Load csv data into database # Monitor query time ## Update the GUI listboxes # query db to get unique bowlers & unique season leages # Note the query returns a dataframe which is sliced into a series and # then converted to a list # populate and refresh the Season_League & Bowler listboxes # Make sure that the proper selections have been made, if not abort # self.plottype_dbquery_columns = {'Average: Total': ['Days', 'Avg_After'], 'Average: Per-Day': ['Days', 'Avg_Today'], 'Series: Scratch': ['Days', 'SS'], # 'Series: Handicap': ['Days', 'HCP', 'SS'], 'Game Comparison': ['Days', 'Gm1', 'Gm2', 'Gm3'], 'Average: Delta': ['Days', 'Avg_Delta']} # pass the query df to the selected plotter method # Provide status message # Make sure that the proper selections have been made, if not abort # Get all data necessary to build all the selected plots # create file paths for image file and json data file used to tranfser # plot information to python script which creates plot # I uses an independent python script to do this because I couldn't make it work otherwise # I was getting strange behavor with matplot lib: # closing the program when I ran - plt.savefig(plotimagefilepath, bbox_inches='tight') # getting suck and not continuing the script when I ran - plt.show() # Prompt user for pdf file path that plot will be built into # if user provided a valid pdf file path, then create the report # If no pdf file path is provided (user hits cancel) no report will be made # Check if entry is already there and that it's not an empty string # Adds new value to list box then clears the entry # color codes the alternating rows # Gets the list box selections and sends them to the # call back depending on the event # Get the currently selected index(es) for each list box # Create a list from the indexes determined above # Depending on the widget, call the correct method # call back selector # preview button click # report button click # self.statusmsg.set(self.message_builder(bowler=bowler, plots=plots, seasonleague=seasonleagues, 
message_appendage='\t\t\tSelected Plot Report Created')) # load button click # plottype listbox selection # bowler listbox selection # season league listbox selection # Update json file with new db location # disconnect from current db then connect to selected db # Clear bowling & season league list boxes # query db to populate and refresh bowling & season league list boxes # populate and refresh the Season_League & Bowler listboxes # Update json file with new db location # disconnect from current db then connect to selected db # Clear bowling & season league list boxes # Extracted data from excel file # initialize working directory & json file, if doesn't exist then create it # JSON file contains db file path. If not found the default path will be used # JSON_Tools = JSON_Tools() # db_filepath = JSON_Tools.Load_Data(jsonfilepath)['db_filepath'] # Create GUI | 2.428066 | 2 |
docs/python/f_written_examination_2019-03-15_ex2.py | Voldemort373/Notes-and-Reference | 30 | 6631808 | <filename>docs/python/f_written_examination_2019-03-15_ex2.py
# -*- coding: utf-8 -*-
# Copyright (c) 2019, <NAME> <<EMAIL>>
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
# SOFTWARE.
from collections import deque
def f(fn, mn):
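    # Build a list of len(fn) digits by cycling over the matriculation number
    # from its last digit, pair digits from the end of that list with digits
    # from the start, then use each in-range pair as swap positions in the
    # family name (every swap copies characters from the original string fn).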
l = deque()
digits = []
for i in range(len(fn)):
j = i % len(mn)
digits.append(int(mn[len(mn) - 1 - j]))
for idx, d in enumerate(reversed(digits)):
if idx < (len(digits) / 2):
l.append((d, digits[idx]))
result = []
for c in fn:
result.append(c)
while l:
t = l.pop()
if t[0] < len(fn) and t[1] < len(fn):
tmp = fn[t[0]]
result[t[0]] = fn[t[1]]
result[t[1]] = tmp
return "".join(result)
my_fn = input("Please provide your family name: ").strip()
my_mn = input("Please provide your matriculation number: ").strip()
print("Result:", f(my_fn, my_mn))
| <filename>docs/python/f_written_examination_2019-03-15_ex2.py
# -*- coding: utf-8 -*-
# Copyright (c) 2019, <NAME> <<EMAIL>>
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
# SOFTWARE.
from collections import deque
def f(fn, mn):
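    # Build a list of len(fn) digits by cycling over the matriculation number
    # from its last digit, pair digits from the end of that list with digits
    # from the start, then use each in-range pair as swap positions in the
    # family name (every swap copies characters from the original string fn).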
l = deque()
digits = []
for i in range(len(fn)):
j = i % len(mn)
digits.append(int(mn[len(mn) - 1 - j]))
for idx, d in enumerate(reversed(digits)):
if idx < (len(digits) / 2):
l.append((d, digits[idx]))
result = []
for c in fn:
result.append(c)
while l:
t = l.pop()
if t[0] < len(fn) and t[1] < len(fn):
tmp = fn[t[0]]
result[t[0]] = fn[t[1]]
result[t[1]] = tmp
return "".join(result)
my_fn = input("Please provide your family name: ").strip()
my_mn = input("Please provide your matriculation number: ").strip()
print("Result:", f(my_fn, my_mn))
| en | 0.623941 | # -*- coding: utf-8 -*- # Copyright (c) 2019, <NAME> <<EMAIL>> # # Permission to use, copy, modify, and/or distribute this software for any purpose # with or without fee is hereby granted, provided that the above copyright notice # and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH # REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, # OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, # DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS # SOFTWARE. | 3.601289 | 4 |
src/fomc_get_data/FomcStatement.py | michelleazee/centralbank_analysis | 18 | 6631809 | <gh_stars>10-100
from datetime import datetime
import threading
import sys
import os
import pickle
import re
import requests
from bs4 import BeautifulSoup
import numpy as np
import pandas as pd
# Import parent class
from .FomcBase import FomcBase
class FomcStatement(FomcBase):
'''
A convenient class for extracting statement from the FOMC website
Example Usage:
fomc = FomcStatement()
df = fomc.get_contents()
'''
def __init__(self, verbose = True, max_threads = 10, base_dir = '../data/FOMC/'):
super().__init__('statement', verbose, max_threads, base_dir)
def _get_links(self, from_year):
'''
Override private function that sets all the links for the contents to download on FOMC website
from from_year (=min(2015, from_year)) to the current most recent year
'''
self.links = []
self.titles = []
self.speakers = []
self.dates = []
r = requests.get(self.calendar_url)
soup = BeautifulSoup(r.text, 'html.parser')
        # Getting links from current page. Meeting scripts are not available.
if self.verbose: print("Getting links for statements...")
contents = soup.find_all('a', href=re.compile('^/newsevents/pressreleases/monetary\d{8}[ax].htm'))
self.links = [content.attrs['href'] for content in contents]
self.speakers = [self._speaker_from_date(self._date_from_link(x)) for x in self.links]
self.titles = ['FOMC Statement'] * len(self.links)
self.dates = [datetime.strptime(self._date_from_link(x), '%Y-%m-%d') for x in self.links]
# Correct some date in the link does not match with the meeting date
for i, m_date in enumerate(self.dates):
if m_date == datetime(2019,10,11):
self.dates[i] = datetime(2019,10,4)
if self.verbose: print("{} links found in the current page.".format(len(self.links)))
# Archived before 2015
if from_year <= 2014:
print("Getting links from archive pages...")
for year in range(from_year, 2015):
yearly_contents = []
fomc_yearly_url = self.base_url + '/monetarypolicy/fomchistorical' + str(year) + '.htm'
r_year = requests.get(fomc_yearly_url)
soup_yearly = BeautifulSoup(r_year.text, 'html.parser')
yearly_contents = soup_yearly.findAll('a', text = 'Statement')
for yearly_content in yearly_contents:
self.links.append(yearly_content.attrs['href'])
self.speakers.append(self._speaker_from_date(self._date_from_link(yearly_content.attrs['href'])))
self.titles.append('FOMC Statement')
self.dates.append(datetime.strptime(self._date_from_link(yearly_content.attrs['href']), '%Y-%m-%d'))
# Correct some date in the link does not match with the meeting date
if self.dates[-1] == datetime(2007,6,18):
self.dates[-1] = datetime(2007,6,28)
elif self.dates[-1] == datetime(2007,8,17):
self.dates[-1] = datetime(2007,8,16)
elif self.dates[-1] == datetime(2008,1,22):
self.dates[-1] = datetime(2008,1,21)
elif self.dates[-1] == datetime(2008,3,11):
self.dates[-1] = datetime(2008,3,10)
elif self.dates[-1] == datetime(2008,10,8):
self.dates[-1] = datetime(2008,10,7)
if self.verbose: print("YEAR: {} - {} links found.".format(year, len(yearly_contents)))
print("There are total ", len(self.links), ' links for ', self.content_type)
def _add_article(self, link, index=None):
'''
Override a private function that adds a related article for 1 link into the instance variable
The index is the index in the article to add to.
Due to concurrent processing, we need to make sure the articles are stored in the right order
'''
if self.verbose:
sys.stdout.write(".")
sys.stdout.flush()
res = requests.get(self.base_url + link)
html = res.text
article = BeautifulSoup(html, 'html.parser')
paragraphs = article.findAll('p')
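        # Store the paragraph texts at the given index, joined by a [SECTION] marker.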
self.articles[index] = "\n\n[SECTION]\n\n".join([paragraph.get_text().strip() for paragraph in paragraphs]) | from datetime import datetime
import threading
import sys
import os
import pickle
import re
import requests
from bs4 import BeautifulSoup
import numpy as np
import pandas as pd
# Import parent class
from .FomcBase import FomcBase
class FomcStatement(FomcBase):
'''
A convenient class for extracting statement from the FOMC website
Example Usage:
fomc = FomcStatement()
df = fomc.get_contents()
'''
def __init__(self, verbose = True, max_threads = 10, base_dir = '../data/FOMC/'):
super().__init__('statement', verbose, max_threads, base_dir)
def _get_links(self, from_year):
'''
Override private function that sets all the links for the contents to download on FOMC website
from from_year (=min(2015, from_year)) to the current most recent year
'''
self.links = []
self.titles = []
self.speakers = []
self.dates = []
r = requests.get(self.calendar_url)
soup = BeautifulSoup(r.text, 'html.parser')
        # Getting links from current page. Meeting scripts are not available.
if self.verbose: print("Getting links for statements...")
contents = soup.find_all('a', href=re.compile('^/newsevents/pressreleases/monetary\d{8}[ax].htm'))
self.links = [content.attrs['href'] for content in contents]
self.speakers = [self._speaker_from_date(self._date_from_link(x)) for x in self.links]
self.titles = ['FOMC Statement'] * len(self.links)
self.dates = [datetime.strptime(self._date_from_link(x), '%Y-%m-%d') for x in self.links]
# Correct some date in the link does not match with the meeting date
for i, m_date in enumerate(self.dates):
if m_date == datetime(2019,10,11):
self.dates[i] = datetime(2019,10,4)
if self.verbose: print("{} links found in the current page.".format(len(self.links)))
# Archived before 2015
if from_year <= 2014:
print("Getting links from archive pages...")
for year in range(from_year, 2015):
yearly_contents = []
fomc_yearly_url = self.base_url + '/monetarypolicy/fomchistorical' + str(year) + '.htm'
r_year = requests.get(fomc_yearly_url)
soup_yearly = BeautifulSoup(r_year.text, 'html.parser')
yearly_contents = soup_yearly.findAll('a', text = 'Statement')
for yearly_content in yearly_contents:
self.links.append(yearly_content.attrs['href'])
self.speakers.append(self._speaker_from_date(self._date_from_link(yearly_content.attrs['href'])))
self.titles.append('FOMC Statement')
self.dates.append(datetime.strptime(self._date_from_link(yearly_content.attrs['href']), '%Y-%m-%d'))
# Correct some date in the link does not match with the meeting date
if self.dates[-1] == datetime(2007,6,18):
self.dates[-1] = datetime(2007,6,28)
elif self.dates[-1] == datetime(2007,8,17):
self.dates[-1] = datetime(2007,8,16)
elif self.dates[-1] == datetime(2008,1,22):
self.dates[-1] = datetime(2008,1,21)
elif self.dates[-1] == datetime(2008,3,11):
self.dates[-1] = datetime(2008,3,10)
elif self.dates[-1] == datetime(2008,10,8):
self.dates[-1] = datetime(2008,10,7)
if self.verbose: print("YEAR: {} - {} links found.".format(year, len(yearly_contents)))
print("There are total ", len(self.links), ' links for ', self.content_type)
def _add_article(self, link, index=None):
'''
Override a private function that adds a related article for 1 link into the instance variable
The index is the index in the article to add to.
Due to concurrent processing, we need to make sure the articles are stored in the right order
'''
if self.verbose:
sys.stdout.write(".")
sys.stdout.flush()
res = requests.get(self.base_url + link)
html = res.text
article = BeautifulSoup(html, 'html.parser')
paragraphs = article.findAll('p')
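        # Store the paragraph texts at the given index, joined by a [SECTION] marker.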
self.articles[index] = "\n\n[SECTION]\n\n".join([paragraph.get_text().strip() for paragraph in paragraphs]) | en | 0.8427 | # Import parent class A convenient class for extracting statement from the FOMC website Example Usage: fomc = FomcStatement() df = fomc.get_contents() Override private function that sets all the links for the contents to download on FOMC website from from_year (=min(2015, from_year)) to the current most recent year # Getting links from current page. Meetin scripts are not available. # Correct some date in the link does not match with the meeting date # Archived before 2015 # Correct some date in the link does not match with the meeting date Override a private function that adds a related article for 1 link into the instance variable The index is the index in the article to add to. Due to concurrent processing, we need to make sure the articles are stored in the right order | 3.107317 | 3 |
src/gui/stdmenu.py | dougabugg/python-directory-statistics | 0 | 6631810 | <filename>src/gui/stdmenu.py<gh_stars>0
from tkinter import *
from tkinter.dialog import Dialog
from tkinter.filedialog import Open,SaveAs,Directory
from gui.walkstatus import WalkStatusPopup
from fstree.util import saveFile,loadFile
import os
class StandardMenu(Menu):
def __init__(self,callback=None,*a,**k):
super().__init__(*a,**k)
self.file_menu = file_menu = Menu(self)
self.add_cascade(label="File",menu=file_menu)
#TODO: actually set those accelerators?
file_menu.add_command(
label="New Snapshot",
## accelerator="Ctrl+N",
command=self.newSnap)
file_menu.add_command(
label="Open Snapshot",
## accelerator="Ctrl+O",
command=self.openSnap)
file_menu.add_command(
label="Save Snapshot",
## accelerator="Ctrl+S",
command=self.saveSnap,
state="disabled")
file_menu.add_separator()
#TODO: command for close option
        #do we want to prompt for unsaved snapshot?
file_menu.add_command(
label="Close",
accelerator="Alt+F4",
command=self._root().destroy)
self.snapshot = None
self.needsSave = False
self.callback = callback or self._callback
title = "Select Folder to create New Snapshot from"
filetypes = (
## ("Snapshot (lzma)",".json.xz"),
## ("Snapshot (gzip)","*.json.gz"),
## ("Snapshot (bzip2)","*.json.bz2"),
## ("Uncompressed Snapshot","*.json"),
## ("All Files","*"),
("All Snapshots",(".json.xz","*.json.gz","*.json.bz2","*.json")),
("Snapshot (lzma)",".json.xz"),
("Snapshot (gzip)","*.json.gz"),
("Snapshot (bzip2)","*.json.bz2"),
("Uncompressed Snapshot","*.json"),
("All Files","*"),
)
self.d = Directory(master=self._root(),title=title,initialdir=os.getcwd())
self.ofn = Open(master=self._root(),title="Open a Snapshot",initialdir=os.getcwd(),filetypes=filetypes)
self.sfn = SaveAs(master=self._root(),title="Save a Snapshot",initialdir=os.getcwd(),filetypes=filetypes,initialfile=".json.xz")
def _callback(self,snapshot):
pass
def newSnap(self):
path = self.d.show()
if path:
win = WalkStatusPopup(master=self._root(),path=path)
def callback(snapshot):
win.destroy()
if snapshot:
self.snapshot = snapshot
self.callback(snapshot)
self.file_menu.entryconfigure(3,state="normal")
self.needsSave = True
win.frame.callback = callback
win.frame.start()
def openSnap(self):
if self.needsSave == False:
path = self.ofn.show()
if path:
try:
self.snapshot = loadFile(path)
self.callback(self.snapshot)
#still allow saving of the opened snapshot i guess?
## self.file_menu.entryconfigure(3,state="disabled")
self.file_menu.entryconfigure(3,state="normal")
self.needsSave = False
except Exception as e:
Dialog(self._root(),
{"title":"Error",
"text":"There was an error while opening {}\n"\
"{}".format(path,e),
"bitmap":"warning",
"default":0,
"strings":["Ok"]})
else:
#display "do you want to discard the currently unsaved snapshot?"
choice = Dialog(self._root(),
{"title":"Confirm",
"text":"""The currently open snapshot has not been saved,
and will be lost if you open a different snapshot.
Do you want to save the current snapshot?""",
"bitmap":"warning",
"default":0,
"strings":("Save","Discard","Cancel")})
if choice.num == 0:
#save
self.saveSnap()
elif choice.num == 1:
#discard and open
self.needsSave = False
self.openSnap()
elif choice.num == 2:
#cancel
return
else:
print("Unknown choice:",choice.num)
def saveSnap(self):
path = self.sfn.show()
if path:
try:
saveFile(path,self.snapshot)
self.needsSave = False
except Exception as e:
Dialog(self._root(),
{"title":"Error",
"text":"There was an error while saving {}\n"\
"{}".format(path,e),
"bitmap":"warning",
"default":0,
"strings":["Ok"]})
| <filename>src/gui/stdmenu.py<gh_stars>0
from tkinter import *
from tkinter.dialog import Dialog
from tkinter.filedialog import Open,SaveAs,Directory
from gui.walkstatus import WalkStatusPopup
from fstree.util import saveFile,loadFile
import os
class StandardMenu(Menu):
def __init__(self,callback=None,*a,**k):
super().__init__(*a,**k)
self.file_menu = file_menu = Menu(self)
self.add_cascade(label="File",menu=file_menu)
#TODO: actually set those accelerators?
file_menu.add_command(
label="New Snapshot",
## accelerator="Ctrl+N",
command=self.newSnap)
file_menu.add_command(
label="Open Snapshot",
## accelerator="Ctrl+O",
command=self.openSnap)
file_menu.add_command(
label="Save Snapshot",
## accelerator="Ctrl+S",
command=self.saveSnap,
state="disabled")
file_menu.add_separator()
#TODO: command for close option
        #do we want to prompt for unsaved snapshot?
file_menu.add_command(
label="Close",
accelerator="Alt+F4",
command=self._root().destroy)
self.snapshot = None
self.needsSave = False
self.callback = callback or self._callback
title = "Select Folder to create New Snapshot from"
filetypes = (
## ("Snapshot (lzma)",".json.xz"),
## ("Snapshot (gzip)","*.json.gz"),
## ("Snapshot (bzip2)","*.json.bz2"),
## ("Uncompressed Snapshot","*.json"),
## ("All Files","*"),
("All Snapshots",(".json.xz","*.json.gz","*.json.bz2","*.json")),
("Snapshot (lzma)",".json.xz"),
("Snapshot (gzip)","*.json.gz"),
("Snapshot (bzip2)","*.json.bz2"),
("Uncompressed Snapshot","*.json"),
("All Files","*"),
)
self.d = Directory(master=self._root(),title=title,initialdir=os.getcwd())
self.ofn = Open(master=self._root(),title="Open a Snapshot",initialdir=os.getcwd(),filetypes=filetypes)
self.sfn = SaveAs(master=self._root(),title="Save a Snapshot",initialdir=os.getcwd(),filetypes=filetypes,initialfile=".json.xz")
def _callback(self,snapshot):
pass
def newSnap(self):
path = self.d.show()
if path:
win = WalkStatusPopup(master=self._root(),path=path)
def callback(snapshot):
win.destroy()
if snapshot:
self.snapshot = snapshot
self.callback(snapshot)
self.file_menu.entryconfigure(3,state="normal")
self.needsSave = True
win.frame.callback = callback
win.frame.start()
def openSnap(self):
if self.needsSave == False:
path = self.ofn.show()
if path:
try:
self.snapshot = loadFile(path)
self.callback(self.snapshot)
#still allow saving of the opened snapshot i guess?
## self.file_menu.entryconfigure(3,state="disabled")
self.file_menu.entryconfigure(3,state="normal")
self.needsSave = False
except Exception as e:
Dialog(self._root(),
{"title":"Error",
"text":"There was an error while opening {}\n"\
"{}".format(path,e),
"bitmap":"warning",
"default":0,
"strings":["Ok"]})
else:
#display "do you want to discard the currently unsaved snapshot?"
choice = Dialog(self._root(),
{"title":"Confirm",
"text":"""The currently open snapshot has not been saved,
and will be lost if you open a different snapshot.
Do you want to save the current snapshot?""",
"bitmap":"warning",
"default":0,
"strings":("Save","Discard","Cancel")})
if choice.num == 0:
#save
self.saveSnap()
elif choice.num == 1:
#discard and open
self.needsSave = False
self.openSnap()
elif choice.num == 2:
#cancel
return
else:
print("Unknown choice:",choice.num)
def saveSnap(self):
path = self.sfn.show()
if path:
try:
saveFile(path,self.snapshot)
self.needsSave = False
except Exception as e:
Dialog(self._root(),
{"title":"Error",
"text":"There was an error while saving {}\n"\
"{}".format(path,e),
"bitmap":"warning",
"default":0,
"strings":["Ok"]})
| en | 0.715456 | #TODO: actually set those accelerators? ## accelerator="Ctrl+N", ## accelerator="Ctrl+O", ## accelerator="Ctrl+S", #TODO: command for close option #do we want to prompt for unsave snapshot? ## ("Snapshot (lzma)",".json.xz"), ## ("Snapshot (gzip)","*.json.gz"), ## ("Snapshot (bzip2)","*.json.bz2"), ## ("Uncompressed Snapshot","*.json"), ## ("All Files","*"), #still allow saving of the opened snapshot i guess? ## self.file_menu.entryconfigure(3,state="disabled") #display "do you want to discard the currently unsaved snapshot?" The currently open snapshot has not been saved, and will be lost if you open a different snapshot. Do you want to save the current snapshot? #save #discard and open #cancel | 2.739969 | 3 |
main.py | rushilkhattar09/Indian-Art-Forms-Emotion-Detection- | 0 | 6631811 | import argparse
from webcam_utils import realtime_emotions
def run_realtime_emotion():
realtime_emotions()
def main():
run_realtime_emotion()
if __name__ == '__main__':
main()
| import argparse
from webcam_utils import realtime_emotions
def run_realtime_emotion():
realtime_emotions()
def main():
run_realtime_emotion()
if __name__ == '__main__':
main()
| none | 1 | 1.324415 | 1 |
|
drssms/scripts/cli.py | hvgab/drssms | 0 | 6631812 | <reponame>hvgab/drssms
"""CLI for DRSSMS package."""
from drssms import NeverAPI
import click
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@click.group()
def main():
"""Enter main script."""
# napi = NeverAPI()
# TODO: Move napi up here and use context
pass
@main.command()
@click.argument('number')
@click.argument('text')
@click.option('--ani')
def push(number, text, ani):
"""Send push SMS to number without service."""
napi = NeverAPI()
napi.login()
napi.send_push_sms(number, text, ani)
@main.command()
@click.argument('number')
@click.argument('serviceid')
@click.option('--text', help='SMS text to overwrite service. \
Remember quotationmarks')
def service(serviceid, number, text):
"""Send service SMS to number, optionally overwrite text."""
napi = NeverAPI()
napi.login()
napi.send_service_sms(number, serviceid, text)
@main.command()
@click.argument('number')
def stop(number):
"""Stop an active SMS dialog."""
napi = NeverAPI()
napi.login()
napi.stop_dialog(number)
@main.command()
@click.option('--start', help='startdate in isoformat 2018-01-31 (inclusive)')
@click.option('--end', help='enddate in isoformat 2018-01-31 (exclusive)')
@click.option('--filename', help='filename without extension. \
Default: "sms_dialoger_start-[startdate]-end-[enddate].csv"')
def download(start, end, filename, name='download-sms'):
"""Download sms dialog file.
No options: get yesterday.
Only start: 24hours from start.
Start and end: start(inclusive) to end(exclusive).
"""
napi = NeverAPI()
napi.login()
napi.download_sms_file(start, end, filename)
| """CLI for DRSSMS package."""
from drssms import NeverAPI
import click
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@click.group()
def main():
"""Enter main script."""
# napi = NeverAPI()
# TODO: Move napi up here and use context
pass
@main.command()
@click.argument('number')
@click.argument('text')
@click.option('--ani')
def push(number, text, ani):
"""Send push SMS to number without service."""
napi = NeverAPI()
napi.login()
napi.send_push_sms(number, text, ani)
@main.command()
@click.argument('number')
@click.argument('serviceid')
@click.option('--text', help='SMS text to overwrite service. \
Remember quotationmarks')
def service(serviceid, number, text):
"""Send service SMS to number, optionally overwrite text."""
napi = NeverAPI()
napi.login()
napi.send_service_sms(number, serviceid, text)
@main.command()
@click.argument('number')
def stop(number):
"""Stop an active SMS dialog."""
napi = NeverAPI()
napi.login()
napi.stop_dialog(number)
@main.command()
@click.option('--start', help='startdate in isoformat 2018-01-31 (inclusive)')
@click.option('--end', help='enddate in isoformat 2018-01-31 (exclusive)')
@click.option('--filename', help='filename without extension. \
Default: "sms_dialoger_start-[startdate]-end-[enddate].csv"')
def download(start, end, filename, name='download-sms'):
"""Download sms dialog file.
No options: get yesterday.
Only start: 24hours from start.
Start and end: start(inclusive) to end(exclusive).
"""
napi = NeverAPI()
napi.login()
napi.download_sms_file(start, end, filename) | en | 0.714713 | CLI for DRSSMS package. Enter main script. # napi = NeverAPI() # TODO: Move napi up here and use context Send push SMS to number without service. Send service SMS to number, optionally overwrite text. Stop an active SMS dialog. Download sms dialog file. No options: get yesterday. Only start: 24hours from start. Start and end: start(inclusive) to end(exclusive). | 2.570096 | 3 |
sdk/python/pulumi_cloudflare/filter.py | pulumi/pulumi-cloudflare | 35 | 6631813 | <filename>sdk/python/pulumi_cloudflare/filter.py
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['FilterArgs', 'Filter']
@pulumi.input_type
class FilterArgs:
def __init__(__self__, *,
expression: pulumi.Input[str],
zone_id: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
paused: Optional[pulumi.Input[bool]] = None,
ref: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Filter resource.
:param pulumi.Input[str] expression: The filter expression to be used.
:param pulumi.Input[str] zone_id: The DNS zone to which the Filter should be added.
:param pulumi.Input[str] description: A note that you can use to describe the purpose of the filter.
:param pulumi.Input[bool] paused: Whether this filter is currently paused. Boolean value.
:param pulumi.Input[str] ref: Short reference tag to quickly select related rules.
"""
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "zone_id", zone_id)
if description is not None:
pulumi.set(__self__, "description", description)
if paused is not None:
pulumi.set(__self__, "paused", paused)
if ref is not None:
pulumi.set(__self__, "ref", ref)
@property
@pulumi.getter
def expression(self) -> pulumi.Input[str]:
"""
The filter expression to be used.
"""
return pulumi.get(self, "expression")
@expression.setter
def expression(self, value: pulumi.Input[str]):
pulumi.set(self, "expression", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> pulumi.Input[str]:
"""
The DNS zone to which the Filter should be added.
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: pulumi.Input[str]):
pulumi.set(self, "zone_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A note that you can use to describe the purpose of the filter.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def paused(self) -> Optional[pulumi.Input[bool]]:
"""
Whether this filter is currently paused. Boolean value.
"""
return pulumi.get(self, "paused")
@paused.setter
def paused(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "paused", value)
@property
@pulumi.getter
def ref(self) -> Optional[pulumi.Input[str]]:
"""
Short reference tag to quickly select related rules.
"""
return pulumi.get(self, "ref")
@ref.setter
def ref(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ref", value)
@pulumi.input_type
class _FilterState:
def __init__(__self__, *,
description: Optional[pulumi.Input[str]] = None,
expression: Optional[pulumi.Input[str]] = None,
paused: Optional[pulumi.Input[bool]] = None,
ref: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Filter resources.
:param pulumi.Input[str] description: A note that you can use to describe the purpose of the filter.
:param pulumi.Input[str] expression: The filter expression to be used.
:param pulumi.Input[bool] paused: Whether this filter is currently paused. Boolean value.
:param pulumi.Input[str] ref: Short reference tag to quickly select related rules.
:param pulumi.Input[str] zone_id: The DNS zone to which the Filter should be added.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if expression is not None:
pulumi.set(__self__, "expression", expression)
if paused is not None:
pulumi.set(__self__, "paused", paused)
if ref is not None:
pulumi.set(__self__, "ref", ref)
if zone_id is not None:
pulumi.set(__self__, "zone_id", zone_id)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A note that you can use to describe the purpose of the filter.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def expression(self) -> Optional[pulumi.Input[str]]:
"""
The filter expression to be used.
"""
return pulumi.get(self, "expression")
@expression.setter
def expression(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expression", value)
@property
@pulumi.getter
def paused(self) -> Optional[pulumi.Input[bool]]:
"""
Whether this filter is currently paused. Boolean value.
"""
return pulumi.get(self, "paused")
@paused.setter
def paused(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "paused", value)
@property
@pulumi.getter
def ref(self) -> Optional[pulumi.Input[str]]:
"""
Short reference tag to quickly select related rules.
"""
return pulumi.get(self, "ref")
@ref.setter
def ref(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ref", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> Optional[pulumi.Input[str]]:
"""
The DNS zone to which the Filter should be added.
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone_id", value)
class Filter(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
expression: Optional[pulumi.Input[str]] = None,
paused: Optional[pulumi.Input[bool]] = None,
ref: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Filter expressions that can be referenced across multiple features, e.g. Firewall Rule. The expression format is similar to [Wireshark Display Filter](https://www.wireshark.org/docs/man-pages/wireshark-filter.html).
## Example Usage
```python
import pulumi
import pulumi_cloudflare as cloudflare
wordpress = cloudflare.Filter("wordpress",
description="Wordpress break-in attempts that are outside of the office",
expression="(http.request.uri.path ~ \".*wp-login.php\" or http.request.uri.path ~ \".*xmlrpc.php\") and ip.src ne 192.0.2.1",
zone_id="d41d8cd98f00b204e9800998ecf8427e")
```
## Import
Filter can be imported using a composite ID formed of zone ID and filter ID, e.g.
```sh
$ pulumi import cloudflare:index/filter:Filter default d41d8cd98f00b204e9800998ecf8427e/9e107d9d372bb6826bd81d3542a419d6
```
        where: * `d41d8cd98f00b204e9800998ecf8427e` - zone ID * `9e107d9d372bb6826bd81d3542a419d6` - filter ID as returned by [API](https://api.cloudflare.com/#zone-firewall-filters)
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A note that you can use to describe the purpose of the filter.
:param pulumi.Input[str] expression: The filter expression to be used.
:param pulumi.Input[bool] paused: Whether this filter is currently paused. Boolean value.
:param pulumi.Input[str] ref: Short reference tag to quickly select related rules.
:param pulumi.Input[str] zone_id: The DNS zone to which the Filter should be added.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: FilterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Filter expressions that can be referenced across multiple features, e.g. Firewall Rule. The expression format is similar to [Wireshark Display Filter](https://www.wireshark.org/docs/man-pages/wireshark-filter.html).
## Example Usage
```python
import pulumi
import pulumi_cloudflare as cloudflare
wordpress = cloudflare.Filter("wordpress",
description="Wordpress break-in attempts that are outside of the office",
expression="(http.request.uri.path ~ \".*wp-login.php\" or http.request.uri.path ~ \".*xmlrpc.php\") and ip.src ne 192.0.2.1",
zone_id="d41d8cd98f00b204e9800998ecf8427e")
```
## Import
Filter can be imported using a composite ID formed of zone ID and filter ID, e.g.
```sh
$ pulumi import cloudflare:index/filter:Filter default d41d8cd98f00b204e9800998ecf8427e/9e107d9d372bb6826bd81d3542a419d6
```
        where: * `d41d8cd98f00b204e9800998ecf8427e` - zone ID * `9e107d9d372bb6826bd81d3542a419d6` - filter ID as returned by [API](https://api.cloudflare.com/#zone-firewall-filters)
:param str resource_name: The name of the resource.
:param FilterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(FilterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
expression: Optional[pulumi.Input[str]] = None,
paused: Optional[pulumi.Input[bool]] = None,
ref: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = FilterArgs.__new__(FilterArgs)
__props__.__dict__["description"] = description
if expression is None and not opts.urn:
raise TypeError("Missing required property 'expression'")
__props__.__dict__["expression"] = expression
__props__.__dict__["paused"] = paused
__props__.__dict__["ref"] = ref
if zone_id is None and not opts.urn:
raise TypeError("Missing required property 'zone_id'")
__props__.__dict__["zone_id"] = zone_id
super(Filter, __self__).__init__(
'cloudflare:index/filter:Filter',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
expression: Optional[pulumi.Input[str]] = None,
paused: Optional[pulumi.Input[bool]] = None,
ref: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None) -> 'Filter':
"""
Get an existing Filter resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: A note that you can use to describe the purpose of the filter.
:param pulumi.Input[str] expression: The filter expression to be used.
:param pulumi.Input[bool] paused: Whether this filter is currently paused. Boolean value.
:param pulumi.Input[str] ref: Short reference tag to quickly select related rules.
:param pulumi.Input[str] zone_id: The DNS zone to which the Filter should be added.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _FilterState.__new__(_FilterState)
__props__.__dict__["description"] = description
__props__.__dict__["expression"] = expression
__props__.__dict__["paused"] = paused
__props__.__dict__["ref"] = ref
__props__.__dict__["zone_id"] = zone_id
return Filter(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
A note that you can use to describe the purpose of the filter.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def expression(self) -> pulumi.Output[str]:
"""
The filter expression to be used.
"""
return pulumi.get(self, "expression")
@property
@pulumi.getter
def paused(self) -> pulumi.Output[Optional[bool]]:
"""
Whether this filter is currently paused. Boolean value.
"""
return pulumi.get(self, "paused")
@property
@pulumi.getter
def ref(self) -> pulumi.Output[Optional[str]]:
"""
Short reference tag to quickly select related rules.
"""
return pulumi.get(self, "ref")
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> pulumi.Output[str]:
"""
The DNS zone to which the Filter should be added.
"""
return pulumi.get(self, "zone_id")
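The docstrings above already show how to create and import a filter. As a small complementary sketch (the resource name and ID below are placeholders, not taken from this SDK file), an already-provisioned filter can be looked up with the static `get()` helper and its outputs reused elsewhere in a Pulumi program:

```python
import pulumi
import pulumi_cloudflare as cloudflare

# Look up an existing filter by its provider ID (placeholder value) and
# re-export one of its output properties for other parts of the stack.
existing = cloudflare.Filter.get(
    "existing-filter",
    id="9e107d9d372bb6826bd81d3542a419d6",
)
pulumi.export("filter_expression", existing.expression)
```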
all_repos/source/github_forks.py | briandcho/all-repos | 350 | 6631814 | from typing import Dict
from typing import NamedTuple
from all_repos import github_api
from all_repos.util import hide_api_key_repr
class Settings(NamedTuple):
api_key: str
repo: str
collaborator: bool = True
forks: bool = True
private: bool = False
archived: bool = False
base_url: str = 'https://api.github.com'
# TODO: https://github.com/python/mypy/issues/8543
def __repr__(self) -> str:
return hide_api_key_repr(self)
def list_repos(settings: Settings) -> Dict[str, str]:
repos = []
to_search = [settings.repo]
while to_search:
slug = to_search.pop()
res = github_api.get_all(
f'{settings.base_url}/repos/{slug}/forks?per_page=100',
headers={'Authorization': f'token {settings.api_key}'},
)
repos.extend(res)
to_search.extend(repo['full_name'] for repo in res if repo['forks'])
return github_api.filter_repos(
repos,
forks=settings.forks,
private=settings.private,
collaborator=settings.collaborator,
archived=settings.archived,
)
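The module only defines the `Settings` tuple and `list_repos`; all-repos normally drives it through its own configuration, but a minimal direct-use sketch (the token and repository slug are placeholders) would be:

```python
from all_repos.source.github_forks import Settings, list_repos

# Placeholder token and starting repository; list_repos walks the fork graph
# breadth-first from this slug and returns a name -> clone-info mapping after
# applying the forks/private/collaborator/archived filters.
settings = Settings(api_key="ghp_exampletoken", repo="asottile/all-repos")
repos = list_repos(settings)
for name in sorted(repos):
    print(name)
```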
preliminaries/01_create_labels.py | chrisdrymon/greek-morph-tagger | 9 | 6631815 | import os
from bs4 import BeautifulSoup
import pickle
from preliminaries.utilities_morph import create_morph_classes
agdt_folder = os.path.join('../data', 'corpora', 'greek', 'annotated', 'perseus-771dca2', 'texts')
gorman_folder = os.path.join('../data', 'corpora', 'greek', 'annotated', 'gorman')
all_files = []
for file in sorted(os.listdir(agdt_folder))[:26]:
all_files.append(os.path.join(agdt_folder, file))
for file in sorted(os.listdir(gorman_folder)):
all_files.append(os.path.join(gorman_folder, file))
file_count = 0
py_labels = []
# Create morphology aspect classes to simplify tensor sizing and file naming. Keep them in this order.
pos, person, number, tense, mood, voice, gender, case, degree = create_morph_classes()
morphs = (pos, person, number, tense, mood, voice, gender, case, degree)
# Change this to whichever aspect of morphology labels are needed for.
relevant_morph = mood
# This is just a string that is used in the filename to be saved.
corpus_set = 'first26-gorman'
# Search through every work in the annotated Greek folder
for file in all_files:
if file[-4:] == '.xml':
file_count += 1
print(file_count, file)
# Open the files (they are XML's) with beautiful soup and search through every word in every sentence.
xml_file = open(os.path.join(file), 'r', encoding='utf-8')
soup = BeautifulSoup(xml_file, 'xml')
sentences = soup.find_all('sentence')
for sentence in sentences:
tokens = sentence.find_all(['word', 'token'])
for token in tokens:
if token.has_attr('form') and token.has_attr('postag') and token.has_attr('artificial') is False:
# Now create the label tensors.
morph_aspect_tensor = [0] * (len(relevant_morph.tags) + 1)
try:
morph_aspect_tensor[relevant_morph.tags.index(token['postag']
[morphs.index(relevant_morph)])] = 1
except IndexError:
print(sentence['id'], token['id'], token['form'])
morph_aspect_tensor[-1] = 1
except ValueError:
print(sentence['id'], token['id'], token['form'])
morph_aspect_tensor[-1] = 1
py_labels.append(morph_aspect_tensor)
print(f'Labels: {len(py_labels)}')
with open(os.path.join('../data', 'pickles', f'labels-{relevant_morph.title}-{corpus_set}-tensors.pickle'),
'wb') as outfile:
pickle.dump(py_labels, outfile)
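Downstream training code presumably reloads the pickled list in the same token order as the matching feature tensors; a small reload sketch (the aspect title 'mood' and the corpus-set string are assumptions mirroring the settings above):

```python
import os
import pickle

# Reload the one-hot label tensors written above; the filename mirrors the
# f-string used by the script (aspect title and corpus-set string assumed).
label_path = os.path.join('../data', 'pickles', 'labels-mood-first26-gorman-tensors.pickle')
with open(label_path, 'rb') as infile:
    labels = pickle.load(infile)
print(f'{len(labels)} label vectors, {len(labels[0])} classes each (last slot = unparsable tag)')
```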
sample_matrix_widget.py | MarinaMeyta/WhoseCppCode | 1 | 6631816 | <reponame>MarinaMeyta/WhoseCppCode
from core.sample_matrix import get_sample_matrix
from ipywidgets import Button
from IPython.display import display, clear_output
# Path to the data directories
path = './data/'
outpath = './data/matricies/'
def display_matrix_widget(path, outpath):
    matrix_btn = Button(description='Получить матрицу',  # "Get the matrix"
                        tooltip='Click me',
                        icon='check', button_style='success')
    def matrix_btn_click(b):
        print('Пожалуйста, подождите...')  # "Please wait..."
        clear_output()
        get_sample_matrix(path, outpath)
        print('Готово.')  # "Done."
matrix_btn.on_click(matrix_btn_click)
display(matrix_btn)
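In a notebook cell the widget is created by a single call; a sketch using the module-level default paths (the data directories are assumed to exist):

```python
from sample_matrix_widget import display_matrix_widget

# Render the button; clicking it runs get_sample_matrix('./data/', './data/matricies/').
display_matrix_widget('./data/', './data/matricies/')
```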
common/utils/manopth/examples/manopth_demo.py | Alan-delete/I2L-MeshNet_RELEASE | 544 | 6631817 | import argparse
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import torch
from tqdm import tqdm
from manopth import argutils
from manopth.manolayer import ManoLayer
from manopth.demo import display_hand
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', default=1, type=int)
parser.add_argument('--cuda', action='store_true')
parser.add_argument(
'--no_display',
action='store_true',
help="Disable display output of ManoLayer given random inputs")
parser.add_argument('--side', default='left', choices=['left', 'right'])
parser.add_argument('--random_shape', action='store_true', help="Random hand shape")
parser.add_argument('--rand_mag', type=float, default=1, help="Controls pose variability")
parser.add_argument(
'--flat_hand_mean',
action='store_true',
help="Use flat hand as mean instead of average hand pose")
parser.add_argument(
'--iters',
type=int,
default=1,
help=
"Use for quick profiling of forward and backward pass accross ManoLayer"
)
parser.add_argument('--mano_root', default='mano/models')
parser.add_argument('--root_rot_mode', default='axisang', choices=['rot6d', 'axisang'])
parser.add_argument('--no_pca', action='store_true', help="Give axis-angle or rotation matrix as inputs instead of PCA coefficients")
parser.add_argument('--joint_rot_mode', default='axisang', choices=['rotmat', 'axisang'], help="Joint rotation inputs")
parser.add_argument(
'--mano_ncomps', default=6, type=int, help="Number of PCA components")
args = parser.parse_args()
argutils.print_args(args)
layer = ManoLayer(
flat_hand_mean=args.flat_hand_mean,
side=args.side,
mano_root=args.mano_root,
ncomps=args.mano_ncomps,
use_pca=not args.no_pca,
root_rot_mode=args.root_rot_mode,
joint_rot_mode=args.joint_rot_mode)
if args.root_rot_mode == 'axisang':
rot = 3
else:
rot = 6
print(rot)
if args.no_pca:
args.mano_ncomps = 45
# Generate random pose coefficients
pose_params = args.rand_mag * torch.rand(args.batch_size, args.mano_ncomps + rot)
pose_params.requires_grad = True
if args.random_shape:
shape = torch.rand(args.batch_size, 10)
else:
shape = torch.zeros(1) # Hack to act like None for PyTorch JIT
if args.cuda:
pose_params = pose_params.cuda()
shape = shape.cuda()
layer.cuda()
# Loop for forward/backward quick profiling
for idx in tqdm(range(args.iters)):
# Forward pass
verts, Jtr = layer(pose_params, th_betas=shape)
# Backward pass
loss = torch.norm(verts)
loss.backward()
if not args.no_display:
verts, Jtr = layer(pose_params, th_betas=shape)
joints = Jtr.cpu().detach()
verts = verts.cpu().detach()
# Draw obtained vertices and joints
display_hand({
'verts': verts,
'joints': joints
},
mano_faces=layer.th_faces)
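Stripped of the argparse plumbing, the core of the demo is a single differentiable ManoLayer forward pass; a minimal CPU-only sketch (it assumes the MANO model files are available under mano/models):

```python
import torch
from manopth.manolayer import ManoLayer

# Minimal forward pass: 6 PCA pose components plus 3 axis-angle root parameters.
layer = ManoLayer(mano_root='mano/models', ncomps=6, use_pca=True)
batch_size = 2
pose = torch.rand(batch_size, 6 + 3)
shape = torch.rand(batch_size, 10)   # MANO shape (beta) coefficients
verts, joints = layer(pose, th_betas=shape)
print(verts.shape, joints.shape)     # expected: (2, 778, 3) and (2, 21, 3)
```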
blast.py | llrs/PYT-SBI | 5 | 6631818 | <filename>blast.py
#!/usr/bin/python3
# encoding: utf-8
"""This module provides a function to work with the online version of BLAST
provided by the NCBI.
@author: <NAME>, Ferran"""
import logging
import argparse
import os
import urllib
import ftplib
from Bio.Blast import NCBIWWW
from Bio.Blast import NCBIXML
from Bio import SeqIO
from Bio import Entrez
from Bio.PDB.PDBParser import PDBParser
from Bio.SeqUtils import seq1
from Bio.Blast.Applications import NcbiblastpCommandline
import contact_map as cm
import plots
Entrez.email = "<EMAIL>"
Entrez.tool = "cozmic.py"
def run_BLAST(query, blast_type, db, size):
"""Runs a blast online.
query is the file with the sequence.
db is the db to do the blast usually nr or pdb.
output is the file where the results will be stored.
size is the number of expected results.
filt is the option to filter by genus,
so only those sequence of different genus will be saved
Returns a list of dictionaries with the id of the result of the blast"""
logging.info("Starting a blast from {} on {} with {}.".format(
query, db, blast_type))
if os.path.isfile(query):
record = SeqIO.read(query, format="fasta")
result_handle = NCBIWWW.qblast(blast_type, db, record.format("fasta"),
hitlist_size=size)
else:
result_handle = NCBIWWW.qblast(blast_type, db, query,
hitlist_size=size)
blast_record = NCBIXML.read(result_handle)
return blast_record
def analyze_blast_result(blast_out, filt=True):
"""Classify the result of blast."""
logging.info("Analysing the result of blast %s.", blast_out.query_id)
sq = blast_out.query_length
id_set = []
genere_set = set()
for alignment in blast_out.alignments:
logging.debug("Analyzing alignment: %s", alignment.title)
if filt:
if "[" in alignment.title:
spiece = alignment.title.split('[')[-1].rstrip("]")
genere = spiece.split()[0]
for hsp in alignment.hsps:
percentage_identity = 100 * hsp.identities / sq
if percentage_identity > 30:
if genere not in genere_set:
values = alignment.hit_id.split("|")
id_add = {values[0]: values[1],
values[2]: values[3]}
id_set.append(id_add)
genere_set.add(genere)
else:
msg_hsp = "Finishing high-scoring pair of an alignment"
logging.debug(msg_hsp)
else:
values = alignment.hit_id.split("|")
id_set.append({values[0]: values[1], values[2]: values[3]})
        else:
            # Filtering disabled: keep every alignment regardless of genus.
            values = alignment.hit_id.split("|")
            id_set.append({values[0]: values[1], values[2]: values[3]})
return id_set
def retrive_sequence(id_seqs):
"""Generator downloading sequences from Entrez."""
logging.info("Downloading sequences from Entrez.")
for id_seq in id_seqs:
logging.debug("Downloading sequence {}.".format(id_seq))
handle = Entrez.efetch(db="protein", id=id_seq, rettype="fasta",
retmode="text")
yield SeqIO.read(handle, "fasta")
def filter_ids(ids, key):
"""Extract all the values of a shared key from a list."""
logging.info("Extracting ids for %s.", key)
return map(lambda x: x[key], ids)
def local_blast(query, blast_type, db, remote=True, **kwargs):
"""Function to run with the local blast program"""
logging.info("Running blast locally with {} and {}".format(query, db))
if remote:
blast_cline = NcbiblastpCommandline(query=query, db=db,
remote=True, out="blast.out",
outfmt="5", evalue=0.001, **kwargs)
else:
blast_cline = NcbiblastpCommandline(query=query, db=db, outfmt="5",
out="blast.out",
evalue=0.001, **kwargs)
print(blast_cline)
stdout, stderr = blast_cline()
logging.debug(stderr)
    # Read back the XML report written by the command line ("w" would truncate it).
    with open("blast.out") as blast_out:
        blast_record = NCBIXML.read(blast_out)
return blast_record
if __name__ == "__main__":
fmt = """%(asctime)s - %(filename)s - %(funcName)s - %(levelname)s
- %(message)s"""
logging.basicConfig(filename='blast.log', level=logging.DEBUG,
format=fmt)
msg = 'Runs blast online.'
args_helper = argparse.ArgumentDefaultsHelpFormatter
argparser = argparse.ArgumentParser(description=msg,
formatter_class=args_helper)
argparser.add_argument("input",
help="Id of the sequence or file ")
argparser.add_argument("output_file",
help="Output file")
argparser.add_argument("type",
help="Type of blast to perform",
choices=["blastp", "blastn", "blast", "blastx",
"tblastn", "tblastx"])
choices_db = ["nr", "pdb", "swissprot", "refseq_protein", "pat",
"env_nr", "tsa_nr"]
argparser.add_argument("db",
help="Set the database to search on.",
choices=choices_db,
default="nr")
argparser.add_argument("-s",
help="Set the number hits you want",
type=int,
default=200)
argparser.add_argument("-f",
help="If present don't filter by genus",
action='store_false',
default=True)
argparser.add_argument("-l",
help="""If present do a local blast on the db
path""",
action="store_true", default=False)
args = argparser.parse_args()
blast_result = run_BLAST(args.input, args.type, args.db, args.s)
# blast_result = local_blast(args.input, args.type, args.db)
ides = analyze_blast_result(blast_result, args.f)
ids = list(filter_ids(ides, "gi"))
file_out = open(args.output_file, "w")
logging.info("Saving the output file {}".format(file_out))
SeqIO.write(retrive_sequence(ids), file_out, "fasta")
file_out.close()
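Besides the command-line entry point above, the helpers compose naturally as a library; a sketch of the same pipeline used programmatically (the query file and output name are placeholders):

```python
from Bio import SeqIO
import blast

# Remote blastp of one query against nr, keep at most one hit per genus,
# then download the hit sequences from Entrez and write them to a FASTA file.
record = blast.run_BLAST("query.fasta", "blastp", "nr", 50)
hits = blast.analyze_blast_result(record, filt=True)
gi_numbers = list(blast.filter_ids(hits, "gi"))
with open("homologues.fasta", "w") as handle:
    SeqIO.write(blast.retrive_sequence(gi_numbers), handle, "fasta")
```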
seapy/couplings/__init__.py | FRidh/seapy | 8 | 6631819 | <reponame>FRidh/seapy<filename>seapy/couplings/__init__.py
"""
Couplings
=========
.. toctree::
:maxdepth: 2
Module with all available couplings.
As a general note:
* Points have dimension 0
* Lines have dimension 1
* Surfaces have dimension 2
.. automodule:: seapy.couplings.coupling
.. automodule:: seapy.couplings.couplingpointstructural
.. automodule:: seapy.couplings.couplingsurfaceacoustical
Subsystems are connected to eachother through couplings.
"""
from .couplingpointstructural import CouplingPointStructural
from .couplinglinestructural import CouplingLineStructural
from .couplingsurfaceacoustical import CouplingSurfaceAcoustical
from .couplingsurfaceplateacoustical import CouplingSurfacePlateAcoustical
from .couplingsurfaceacousticalplate import CouplingSurfaceAcousticalPlate
import inspect, sys
couplings_map = {
item[0]: item[1]
for item in inspect.getmembers(sys.modules[__name__], inspect.isclass)
}
"""
Dictionary with all available couplings.
"""
utils/utils.py | asdacsd/Mall-Shop | 0 | 6631820 | import datetime
import time
from app_api.models import SysLog
def get_order_no():
year = datetime.datetime.now().year
month = datetime.datetime.now().month
day = datetime.datetime.now().day
if month < 10:
month = f'0{month}'
if day < 10:
day = f'0{day}'
return f'{year}{month}{day}{int(time.time())}'
def log_save(user, request, flag, message, log_type):
log = SysLog(
user_name=user,
ip_addr=request.META['REMOTE_ADDR'],
action_flag=flag,
message=message,
log_type=log_type
)
log.save()
"""
"""
|
vnpy/trader/ui/mainwindow.py | bixia/MyVnpy | 0 | 6631821 | # -*- encoding: utf-8 -*-
'''
@File : mainwindow.py
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
@Modify Time @Author @Version @Description
------------ ------- -------- -----------
2021/4/1 22:43 <NAME> 1.0 None
'''
from functools import partial
from PyQt5 import QtWidgets, QtCore, QtGui
from vnpy.event import EventEngine
from vnpy.trader.engine import MainEngine
from vnpy.trader.ui.widget import TickMonitor, OrderMonitor, TradeMonitor, LogMonitor, AccountMonitor, PositionMonitor, \
TradingWidget, ConnectDialog
from vnpy.trader.utility import get_icon_path
class MainWindow(QtWidgets.QMainWindow):
"""
Main window of VN trader
"""
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
super(MainWindow, self).__init__()
self.main_engine = main_engine
self.event_engine = event_engine
        self.widgets = {}
        self.connect_dialogs = {}  # gateway name -> ConnectDialog, used by connect()
        self.init_ui()
def init_ui(self):
self.setWindowTitle("VN Trader")
self.init_dock()
self.init_menu()
def init_dock(self):
        trading_widget, trading_dock = self.create_dock(
TradingWidget, "交易", QtCore.Qt.LeftDockWidgetArea
)
tick_widget, tick_dock = self.create_dock(
TickMonitor, "行情", QtCore.Qt.RightDockWidgetArea
)
order_widget, order_dock = self.create_dock(
OrderMonitor, "委托", QtCore.Qt.RightDockWidgetArea
)
trade_widget, trade_dock = self.create_dock(
TradeMonitor, "成交", QtCore.Qt.RightDockWidgetArea
)
log_widget, log_dock = self.create_dock(
LogMonitor, "日志", QtCore.Qt.BottomDockWidgetArea
)
account_widget, account_dock = self.create_dock(
AccountMonitor, "资金", QtCore.Qt.BottomDockWidgetArea
)
position_widget, position_dock = self.create_dock(
PositionMonitor, "持仓", QtCore.Qt.BottomDockWidgetArea
)
def init_menu(self):
"""
init menu bar
"""
bar = self.menuBar()
sys_menu = bar.addMenu("系统")
app_menu = bar.addMenu("功能")
help_menu = bar.addMenu("帮助")
gateway_names = self.main_engine.get_all_gateway_names()
for name in gateway_names:
func = partial(self.connect, name)
icon = QtGui.QIcon(get_icon_path(__file__, "connect.ico"))
action = QtWidgets.QAction(f"链接{name}", self)
action.triggered.connect(func)
action.setIcon(icon)
sys_menu.addAction(action)
def create_dock(self,
widget_class: QtWidgets.QWidget,
name: str,
area: int):
"""
Initialize a dock widget
"""
widget = widget_class(self.main_engine, self.event_engine)
dock = QtWidgets.QDockWidget(name)
dock.setWidget(widget)
dock.setObjectName(name)
dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable)
self.addDockWidget(area, dock)
return widget, dock
def connect(self, gateway_name: str):
"""
Open connect dialog for gateway connection
"""
dialog = self.connect_dialogs.get(gateway_name, None)
if not dialog:
    dialog = ConnectDialog(self.main_engine, gateway_name)
    self.connect_dialogs[gateway_name] = dialog  # cache the dialog so later calls reuse it
dialog.exec()
def closeEvent(self, event: QtGui.QCloseEvent) -> None:
"""
Call main engine close function before exit
"""
reply = QtWidgets.QMessageBox.question(
self,
"退出",
"确认退出?",
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No
)
if reply == QtWidgets.QMessageBox.Yes:
for widget in self.widgets.values():
widget.close()
self.main_engine.close()
event.accept()
else:
event.ignore()
| # -*- encoding: utf-8 -*-
'''
@File : mainwindow.py
@License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA
@Modify Time @Author @Version @Description
------------ ------- -------- -----------
2021/4/1 22:43 <NAME> 1.0 None
'''
from functools import partial
from PyQt5 import QtWidgets, QtCore, QtGui
from vnpy.event import EventEngine
from vnpy.trader.engine import MainEngine
from vnpy.trader.ui.widget import TickMonitor, OrderMonitor, TradeMonitor, LogMonitor, AccountMonitor, PositionMonitor, \
TradingWidget, ConnectDialog
from vnpy.trader.utility import get_icon_path
class MainWindow(QtWidgets.QMainWindow):
"""
Main window of VN trader
"""
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
super(MainWindow, self).__init__()
self.main_engine = main_engine
self.event_engine = event_engine
self.widgets = {}
self.connect_dialogs = {}  # used by connect(); it was read there but never initialized
self.init_ui()
def init_ui(self):
self.setWindowTitle("VN Trader")
self.init_dock()
self.init_menu()
def init_dock(self):
trading_widget, trade_dock = self.create_dock(
TradingWidget, "交易", QtCore.Qt.LeftDockWidgetArea
)
tick_widget, tick_dock = self.create_dock(
TickMonitor, "行情", QtCore.Qt.RightDockWidgetArea
)
order_widget, order_dock = self.create_dock(
OrderMonitor, "委托", QtCore.Qt.RightDockWidgetArea
)
trade_widget, trade_dock = self.create_dock(
TradeMonitor, "成交", QtCore.Qt.RightDockWidgetArea
)
log_widget, log_dock = self.create_dock(
LogMonitor, "日志", QtCore.Qt.BottomDockWidgetArea
)
account_widget, account_dock = self.create_dock(
AccountMonitor, "资金", QtCore.Qt.BottomDockWidgetArea
)
position_widget, position_dock = self.create_dock(
PositionMonitor, "持仓", QtCore.Qt.BottomDockWidgetArea
)
def init_menu(self):
"""
init menu bar
"""
bar = self.menuBar()
sys_menu = bar.addMenu("系统")
app_menu = bar.addMenu("功能")
help_menu = bar.addMenu("帮助")
gateway_names = self.main_engine.get_all_gateway_names()
for name in gateway_names:
func = partial(self.connect, name)
icon = QtGui.QIcon(get_icon_path(__file__, "connect.ico"))
action = QtWidgets.QAction(f"链接{name}", self)
action.triggered.connect(func)
action.setIcon(icon)
sys_menu.addAction(action)
def create_dock(self,
widget_class: QtWidgets.QWidget,
name: str,
area: int):
"""
Initialize a dock widget
"""
widget = widget_class(self.main_engine, self.event_engine)
dock = QtWidgets.QDockWidget(name)
dock.setWidget(widget)
dock.setObjectName(name)
dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable)
self.addDockWidget(area, dock)
return widget, dock
def connect(self, gateway_name: str):
"""
Open connect dialog for gateway connection
"""
dialog = self.connect_dialogs.get(gateway_name, None)
if not dialog:
    dialog = ConnectDialog(self.main_engine, gateway_name)
    self.connect_dialogs[gateway_name] = dialog  # cache the dialog so later calls reuse it
dialog.exec()
def closeEvent(self, event: QtGui.QCloseEvent) -> None:
"""
Call main engine close function before exit
"""
reply = QtWidgets.QMessageBox.question(
self,
"退出",
"确认退出?",
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No,
QtWidgets.QMessageBox.No
)
if reply == QtWidgets.QMessageBox.Yes:
for widget in self.widgets.values():
widget.close()
self.main_engine.close()
event.accept()
else:
event.ignore()
| en | 0.408383 | # -*- encoding: utf-8 -*- @File : mainwindow.py @License : (C)Copyright 2017-2018, Liugroup-NLPR-CASIA @Modify Time @Author @Version @Desciption ------------ ------- -------- ----------- 2021/4/1 22:43 <NAME> 1.0 None Main window of VN trader init menu bar Initialize a dock widget Open connect dialog for gateway connection Call main engine close function before exit | 2.021024 | 2 |
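MainWindow above builds its UI from dock widgets and menu actions whose slots are bound with functools.partial. A stripped-down, runnable sketch of that wiring with plain Qt widgets (editor's illustration; MiniWindow, the table placeholder and the gateway names are invented, and no vnpy imports are needed):

from functools import partial

from PyQt5 import QtCore, QtWidgets


class MiniWindow(QtWidgets.QMainWindow):
    def __init__(self):
        super().__init__()
        self.setWindowTitle("Mini Trader")
        # Dock widget, as in create_dock() above.
        dock = QtWidgets.QDockWidget("行情")
        dock.setWidget(QtWidgets.QTableWidget(5, 3))
        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, dock)
        # Menu actions: partial() binds the gateway name into the slot, as in init_menu() above.
        sys_menu = self.menuBar().addMenu("系统")
        for name in ("CTP", "IB"):
            action = QtWidgets.QAction(f"链接{name}", self)
            action.triggered.connect(partial(self.connect_gateway, name))
            sys_menu.addAction(action)

    def connect_gateway(self, gateway_name: str, checked: bool = False):
        # 'checked' absorbs the bool emitted by QAction.triggered.
        print(f"connect requested for {gateway_name}")


if __name__ == "__main__":
    app = QtWidgets.QApplication([])
    window = MiniWindow()
    window.show()
    app.exec_()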
lowest-common-ancestor-of-a-binary-tree-ii/lowest-common-ancestor-of-a-binary-tree-ii.py | QQuinn03/LeetHub | 0 | 6631822 | <filename>lowest-common-ancestor-of-a-binary-tree-ii/lowest-common-ancestor-of-a-binary-tree-ii.py
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
res=self.dfs(root,p,q)
res1=self.dfs(root,p,p)
res2=self.dfs(root,q,q)
if res and res1 and res2:
return res
return None
def dfs(self,root,p,q):
if not root:
return None
if root.val==p.val:
return p
if root.val ==q.val:
return q
l=self.dfs(root.left,p,q)
r=self.dfs(root.right,p,q)
if not l:
return r
if not r:
return l
return root
| <filename>lowest-common-ancestor-of-a-binary-tree-ii/lowest-common-ancestor-of-a-binary-tree-ii.py
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
res=self.dfs(root,p,q)
res1=self.dfs(root,p,p)
res2=self.dfs(root,q,q)
if res and res1 and res2:
return res
return None
def dfs(self,root,p,q):
if not root:
return None
if root.val==p.val:
return p
if root.val ==q.val:
return q
l=self.dfs(root.left,p,q)
r=self.dfs(root.right,p,q)
if not l:
return r
if not r:
return l
return root
| en | 0.60307 | # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None | 3.948271 | 4 |
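A tiny harness for the Solution above: build a three-node tree, then check that a query for two present nodes returns the root and that a query involving an absent node returns None. TreeNode is defined here because the original only sketches it in a comment, and Solution is assumed to be the class defined above:

class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


root = TreeNode(1)          #     1
root.left = TreeNode(2)     #    / \
root.right = TreeNode(3)    #   2   3

solver = Solution()
print(solver.lowestCommonAncestor(root, root.left, root.right).val)  # 1
print(solver.lowestCommonAncestor(root, root.left, TreeNode(9)))     # None - 9 is not in the tree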
alipay/aop/api/response/AlipayEbppInvoiceUserTradeQueryResponse.py | articuly/alipay-sdk-python-all | 0 | 6631823 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.InvoiceTradeInfo import InvoiceTradeInfo
class AlipayEbppInvoiceUserTradeQueryResponse(AlipayResponse):
def __init__(self):
super(AlipayEbppInvoiceUserTradeQueryResponse, self).__init__()
self._trade_info = None
@property
def trade_info(self):
return self._trade_info
@trade_info.setter
def trade_info(self, value):
if isinstance(value, InvoiceTradeInfo):
self._trade_info = value
else:
self._trade_info = InvoiceTradeInfo.from_alipay_dict(value)
def parse_response_content(self, response_content):
response = super(AlipayEbppInvoiceUserTradeQueryResponse, self).parse_response_content(response_content)
if 'trade_info' in response:
self.trade_info = response['trade_info']
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.InvoiceTradeInfo import InvoiceTradeInfo
class AlipayEbppInvoiceUserTradeQueryResponse(AlipayResponse):
def __init__(self):
super(AlipayEbppInvoiceUserTradeQueryResponse, self).__init__()
self._trade_info = None
@property
def trade_info(self):
return self._trade_info
@trade_info.setter
def trade_info(self, value):
if isinstance(value, InvoiceTradeInfo):
self._trade_info = value
else:
self._trade_info = InvoiceTradeInfo.from_alipay_dict(value)
def parse_response_content(self, response_content):
response = super(AlipayEbppInvoiceUserTradeQueryResponse, self).parse_response_content(response_content)
if 'trade_info' in response:
self.trade_info = response['trade_info']
| en | 0.352855 | #!/usr/bin/env python # -*- coding: utf-8 -*- | 2.362891 | 2 |
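The response class above coerces a raw dict from the JSON payload into a domain object inside its property setter. A generic, self-contained sketch of that setter pattern (TradeInfo and the field name are stand-ins, not the Alipay SDK's classes):

class TradeInfo:
    def __init__(self, out_trade_no=None):
        self.out_trade_no = out_trade_no

    @classmethod
    def from_dict(cls, value):
        return cls(out_trade_no=value.get("out_trade_no"))


class QueryResponse:
    def __init__(self):
        self._trade_info = None

    @property
    def trade_info(self):
        return self._trade_info

    @trade_info.setter
    def trade_info(self, value):
        # Accept either a ready-made object or the raw dict parsed from JSON.
        self._trade_info = value if isinstance(value, TradeInfo) else TradeInfo.from_dict(value)


resp = QueryResponse()
resp.trade_info = {"out_trade_no": "20240101001"}
print(resp.trade_info.out_trade_no)  # 20240101001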
gocomics_downloader/constants.py | PythonCoderAS/GoComics-Downloader | 1 | 6631824 | <filename>gocomics_downloader/constants.py<gh_stars>1-10
import datetime
import mainchecker
mainchecker.check_for_main()
image_class = 'img-fluid item-comic-image' # the class used to find the picture element
one_day = datetime.timedelta(days=1) # one day
| <filename>gocomics_downloader/constants.py<gh_stars>1-10
import datetime
import mainchecker
mainchecker.check_for_main()
image_class = 'img-fluid item-comic-image' # the class used to find the picture element
one_day = datetime.timedelta(days=1) # one day
| en | 0.742196 | # the class used to find the picture element # one day | 1.9409 | 2 |
env/Lib/site-packages/notifypy/os_notifiers/windows.py | JFEscobarM/Proyecto_final | 149 | 6631825 | <filename>env/Lib/site-packages/notifypy/os_notifiers/windows.py
import pathlib
import os
import subprocess
from xml.etree import ElementTree
import tempfile
import uuid
import codecs
from loguru import logger
from ._base import BaseNotifier
class WindowsNotifier(BaseNotifier):
def __init__(self):
"""Main Notification System for Windows. Basically ported from go-toast/toast"""
# Create the base
self._top_ps1_script = f"""
[Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.UI.Notifications.ToastNotification, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null
"""
def _generate_notification_xml(
self,
application_id,
notification_title,
notification_subtitle,
notification_icon,
notification_audio,
):
# Create the top <toast> element
top_element = ElementTree.Element("toast")
# set the duration for the top element
top_element.set("duration", "short")
# create the <visual> element
visual_element = ElementTree.SubElement(top_element, "visual")
# create <binding> element
binding_element = ElementTree.SubElement(visual_element, "binding")
# add the required attribute for this.
# For some reason, go-toast set the template attribute to "ToastGeneric"
# but it never worked for me.
binding_element.set("template", "ToastImageAndText02")
# create <image> element
image_element = ElementTree.SubElement(binding_element, "image")
# add an Id
image_element.set("id", "1")
# add the src
image_element.set("src", notification_icon)
# add the message and title
title_element = ElementTree.SubElement(binding_element, "text")
title_element.set("id", "1")
title_element.text = notification_title
message_element = ElementTree.SubElement(binding_element, "text")
message_element.set("id", "2")
message_element.text = notification_subtitle
if notification_audio:
# the user has provided his own audio file, no need to play the default sound.
audio_element = ElementTree.SubElement(top_element, "audio")
audio_element.set("silent", "true")
# Great we have a generated XML notification.
# We need to create the rest of the .ps1 file and dump it to the temporary directory
generated_ps1_file = f"""
{self._top_ps1_script}
$APP_ID = "{application_id}"
$template = @"
{ElementTree.tostring(top_element, encoding="utf-8").decode('utf-8')}
"@
$xml = New-Object Windows.Data.Xml.Dom.XmlDocument
$xml.LoadXml($template)
$toast = New-Object Windows.UI.Notifications.ToastNotification $xml
[Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier($APP_ID).Show($toast)
"""
return generated_ps1_file
def send_notification(
self,
notification_title,
notification_subtitle,
notification_icon,
application_name,
notification_audio,
):
generated_file = self._generate_notification_xml(
notification_title=notification_title,
notification_subtitle=notification_subtitle,
notification_icon=notification_icon,
application_id=application_name,
notification_audio=notification_audio,
)
if notification_audio:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen(
[
"Powershell",
f'(New-Object Media.SoundPlayer "{notification_audio}").playsync()',
],
startupinfo=startupinfo,
)
# open the temporary directory
with tempfile.TemporaryDirectory() as temp_dir:
generated_uuid_file = str(uuid.uuid4())
with codecs.open(
f"{temp_dir}/{generated_uuid_file}.ps1", "w", "utf_8_sig"
) as ps1_file:
ps1_file.write(generated_file)
# execute the file
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen(
[
"Powershell",
"-ExecutionPolicy",
"Bypass",
"-File",
f"{generated_uuid_file}.ps1",
],
cwd=temp_dir,
startupinfo=startupinfo,
).wait()
return True
| <filename>env/Lib/site-packages/notifypy/os_notifiers/windows.py
import pathlib
import os
import subprocess
from xml.etree import ElementTree
import tempfile
import uuid
import codecs
from loguru import logger
from ._base import BaseNotifier
class WindowsNotifier(BaseNotifier):
def __init__(self):
"""Main Notification System for Windows. Basically ported from go-toast/toast"""
# Create the base
self._top_ps1_script = f"""
[Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.UI.Notifications.ToastNotification, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null
"""
def _generate_notification_xml(
self,
application_id,
notification_title,
notification_subtitle,
notification_icon,
notification_audio,
):
# Create the top <toast> element
top_element = ElementTree.Element("toast")
# set the duration for the top element
top_element.set("duration", "short")
# create the <visual> element
visual_element = ElementTree.SubElement(top_element, "visual")
# create <binding> element
binding_element = ElementTree.SubElement(visual_element, "binding")
# add the required attribute for this.
# For some reason, go-toast set the template attribute to "ToastGeneric"
# but it never worked for me.
binding_element.set("template", "ToastImageAndText02")
# create <image> element
image_element = ElementTree.SubElement(binding_element, "image")
# add an Id
image_element.set("id", "1")
# add the src
image_element.set("src", notification_icon)
# add the message and title
title_element = ElementTree.SubElement(binding_element, "text")
title_element.set("id", "1")
title_element.text = notification_title
message_element = ElementTree.SubElement(binding_element, "text")
message_element.set("id", "2")
message_element.text = notification_subtitle
if notification_audio:
# the user has provided his own audio file, no need to play the default sound.
audio_element = ElementTree.SubElement(top_element, "audio")
audio_element.set("silent", "true")
# Great we have a generated XML notification.
# We need to create the rest of the .ps1 file and dump it to the temporary directory
generated_ps1_file = f"""
{self._top_ps1_script}
$APP_ID = "{application_id}"
$template = @"
{ElementTree.tostring(top_element, encoding="utf-8").decode('utf-8')}
"@
$xml = New-Object Windows.Data.Xml.Dom.XmlDocument
$xml.LoadXml($template)
$toast = New-Object Windows.UI.Notifications.ToastNotification $xml
[Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier($APP_ID).Show($toast)
"""
return generated_ps1_file
def send_notification(
self,
notification_title,
notification_subtitle,
notification_icon,
application_name,
notification_audio,
):
generated_file = self._generate_notification_xml(
notification_title=notification_title,
notification_subtitle=notification_subtitle,
notification_icon=notification_icon,
application_id=application_name,
notification_audio=notification_audio,
)
if notification_audio:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen(
[
"Powershell",
f'(New-Object Media.SoundPlayer "{notification_audio}").playsync()',
],
startupinfo=startupinfo,
)
# open the temporary directory
with tempfile.TemporaryDirectory() as temp_dir:
generated_uuid_file = str(uuid.uuid4())
with codecs.open(
f"{temp_dir}/{generated_uuid_file}.ps1", "w", "utf_8_sig"
) as ps1_file:
ps1_file.write(generated_file)
# execute the file
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen(
[
"Powershell",
"-ExecutionPolicy",
"Bypass",
"-File",
f"{generated_uuid_file}.ps1",
],
cwd=temp_dir,
startupinfo=startupinfo,
).wait()
return True
| en | 0.670376 | Main Notification System for Windows. Basically ported from go-toast/toast # Create the base [Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null [Windows.UI.Notifications.ToastNotification, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null [Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null # Create the top <toast> element # set the duration for the top element # create the <visual> element # create <binding> element # add the required attribute for this. # For some reason, go-toast set the template attribute to "ToastGeneric" # but it never worked for me. # create <image> element # add an Id # add the src # add the message and title # the user has provided his own audio file, no need to play the default sound. # Great we have a generated XML notification. # We need to create the rest of the .ps1 file and dump it to the temporary directory {self._top_ps1_script} $APP_ID = "{application_id}" $template = @" {ElementTree.tostring(top_element, encoding="utf-8").decode('utf-8')} "@ $xml = New-Object Windows.Data.Xml.Dom.XmlDocument $xml.LoadXml($template) $toast = New-Object Windows.UI.Notifications.ToastNotification $xml [Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier($APP_ID).Show($toast) # open the temporary directory # exceute the file | 2.096441 | 2 |
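WindowsNotifier above assembles the toast payload with xml.etree.ElementTree before wrapping it in a PowerShell script. A standalone sketch of just the XML-building step, runnable on any OS (the title, message and icon values are placeholders):

from xml.etree import ElementTree


def build_toast_xml(title: str, message: str, icon: str) -> str:
    toast = ElementTree.Element("toast", {"duration": "short"})
    visual = ElementTree.SubElement(toast, "visual")
    binding = ElementTree.SubElement(visual, "binding", {"template": "ToastImageAndText02"})
    ElementTree.SubElement(binding, "image", {"id": "1", "src": icon})
    ElementTree.SubElement(binding, "text", {"id": "1"}).text = title
    ElementTree.SubElement(binding, "text", {"id": "2"}).text = message
    return ElementTree.tostring(toast, encoding="unicode")


print(build_toast_xml("Build finished", "All tests passed", "icon.png"))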
chartify/_core/radar_chart.py | vh920/chartify | 1 | 6631826 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017-2018 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for Radar Chart
"""
from chartify._core.colors import Color
from chartify._core.chart import Chart
from chartify._core.style import Style
from chartify._core.axes import BaseAxes
from chartify._core.plot import BasePlot
from chartify._core.callout import Callout
from chartify._core.options import options
import numpy as np
import pandas as pd
class RadarChart(Chart):
def __init__(self,
blank_labels=options.get_option('chart.blank_labels'),
layout='slide_50%'):
"""Create a Radar Chart instance.
Note:
Radar charts plot each vertex in counter-clockwise order starting
from the top.
Args:
blank_labels (bool): When true removes the title,
subtitle, axes, and source labels from the chart.
Default False.
layout (str): Change size & aspect ratio of the chart for
fitting into slides.
- 'slide_100%'
- 'slide_75%'
- 'slide_50%' (Suggested for Radar Charts)
- 'slide_25%'
"""
# Validate axis type input
valid_axis_types = [
'linear', 'log'
]
self._axis_type = 'linear'
self._x_axis_type, self._y_axis_type = self._axis_type, self._axis_type
if self._axis_type not in valid_axis_types:
raise ValueError('axis_type must be one of {options}'.format(
options=valid_axis_types))
self._blank_labels = options._get_value(blank_labels)
self.style = Style(self, layout)
self.figure = self._initialize_figure(self._axis_type,
self._axis_type)
self.style._apply_settings('chart')
self.callout = Callout(self)
self.axes = BaseAxes._get_axis_class(self._axis_type,
self._axis_type)(self)
self.plot = PlotRadar(self)
self._source = self._add_source_to_figure()
self._subtitle_glyph = self._add_subtitle_to_figure()
self.figure.toolbar.logo = None # Remove bokeh logo from toolbar.
# Reverse the order of vertical legends. Used with stacked plot types
# to ensure that the stack order is consistent with the legend order.
self._reverse_vertical_legend = False
# Logos disabled for now.
# self.logo = Logo(self)
# Set default for title
title = """ch.set_title('Takeaway')"""
if self._blank_labels:
title = ""
self.set_title(title)
class PlotRadar(BasePlot):
_X_COLUMN = '__xs'
_Y_COLUMN = '__ys'
_THETA_COLUMN = '__theta'
@staticmethod
def _get_thetas(num_vars):
thetas = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
# rotate theta such that the first axis is at the top
thetas += np.pi/2
return thetas
@staticmethod
def _to_xy_coords(df, r, theta, center=0, offset=0.00):
""" Returns the x and y coordinates corresponding to the magnitudes of
each variable displayed in the radar plot
"""
# offset from center of circle
ys = (df[r] + offset) * np.sin(df[theta]) + center
xs = (df[r] + offset) * np.cos(df[theta]) + center
return pd.DataFrame({'xs': xs, 'ys': ys})
def text(self,
data_frame,
radius_column,
text_column,
color_column=None,
color_order=None,
font_size='1em',
x_offset=0,
y_offset=0,
angle=0,
text_color=None,
text_align='left'):
"""Text plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
text_column (str): Column name to plot as text labels.
color_column (str, optional): Column name to group by on the
color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
font_size (str, optional): Size of text.
x_offset (int, optional): # of pixels for horizontal text offset.
Can be negative. Default: 0.
y_offset (int, optional): # of pixels for vertical text offset.
Can be negative. Default: 0.
angle (int): Degrees from horizontal for text rotation.
text_color (str): Color name or hex value.
See chartify.color_palettes.show() for available color names.
If omitted, will default to the next color in the
current color palette.
text_align (str): 'left', 'right', or 'center'
"""
text_font = self._chart.style._get_settings('text_callout_and_plot')[
'font']
if text_color:
text_color = Color(text_color).get_hex_l()
colors, color_values = [text_color], [None]
else:
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single series
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data.copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
source = self._named_column_data_source(
coord_df, series_name=color_value)
self._chart.figure.text(
text=text_column,
x=self._X_COLUMN,
y=self._Y_COLUMN,
text_font_size=font_size,
source=source,
text_color=color,
y_offset=y_offset,
x_offset=x_offset,
angle=angle,
angle_units='deg',
text_font=text_font,
y_range_name=self._y_range_name,
text_align=text_align)
return self._chart
def perimeter(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
line_dash='solid',
line_width=4,
alpha=1.0):
"""Perimeter line plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
line_dash (str, optional): Dash style for the line. One of:
- 'solid'
- 'dashed'
- 'dotted'
- 'dotdash'
- 'dashdot'
line_width (int, optional): Width of the line
alpha (float): Alpha value.
"""
settings = self._chart.style._get_settings('line_plot')
line_cap = settings['line_cap']
line_join = settings['line_join']
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
# Add endpoint
coord_df = coord_df.append(coord_df.iloc[0])
source = self._named_column_data_source(
coord_df, series_name=color_value)
color_value = str(
color_value) if color_value is not None else color_value
self._chart.figure.line(
x=self._X_COLUMN,
y=self._Y_COLUMN,
source=source,
line_width=line_width,
color=color,
line_join=line_join,
line_cap=line_cap,
legend=color_value,
line_dash=line_dash,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
def area(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
alpha=.2):
"""Area plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
alpha (float): Alpha value.
"""
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
# Add endpoint
coord_df = coord_df.append(coord_df.iloc[0])
source = self._named_column_data_source(
coord_df, series_name=color_value)
color_value = str(
color_value) if color_value is not None else color_value
self._chart.figure.patch(
x=self._X_COLUMN,
y=self._Y_COLUMN,
source=source,
color=color,
legend=color_value,
line_width=0,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
def radius(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
line_dash='solid',
line_width=4,
alpha=1.0):
"""Radius line plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
line_dash (str, optional): Dash style for the line. One of:
- 'solid'
- 'dashed'
- 'dotted'
- 'dotdash'
- 'dashdot'
line_width (int, optional): Width of the line
alpha (float): Alpha value.
"""
settings = self._chart.style._get_settings('line_plot')
line_cap = settings['line_cap']
line_join = settings['line_join']
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(
data_frame, radius_column, radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
color_value = str(
color_value) if color_value is not None else color_value
for i, r in coord_df.iterrows():
self._chart.figure.line(
x=[0, r[self._X_COLUMN]],
y=[0, r[self._Y_COLUMN]],
line_width=line_width,
color=color,
line_join=line_join,
line_cap=line_cap,
legend=color_value,
line_dash=line_dash,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2017-2018 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for Radar Chart
"""
from chartify._core.colors import Color
from chartify._core.chart import Chart
from chartify._core.style import Style
from chartify._core.axes import BaseAxes
from chartify._core.plot import BasePlot
from chartify._core.callout import Callout
from chartify._core.options import options
import numpy as np
import pandas as pd
class RadarChart(Chart):
def __init__(self,
blank_labels=options.get_option('chart.blank_labels'),
layout='slide_50%'):
"""Create a Radar Chart instance.
Note:
Radar charts plot each vertex in counter-clockwise order starting
from the top.
Args:
blank_labels (bool): When true removes the title,
subtitle, axes, and source labels from the chart.
Default False.
layout (str): Change size & aspect ratio of the chart for
fitting into slides.
- 'slide_100%'
- 'slide_75%'
- 'slide_50%' (Suggested for Radar Charts)
- 'slide_25%'
"""
# Validate axis type input
valid_axis_types = [
'linear', 'log'
]
self._axis_type = 'linear'
self._x_axis_type, self._y_axis_type = self._axis_type, self._axis_type
if self._axis_type not in valid_axis_types:
raise ValueError('axis_type must be one of {options}'.format(
options=valid_axis_types))
self._blank_labels = options._get_value(blank_labels)
self.style = Style(self, layout)
self.figure = self._initialize_figure(self._axis_type,
self._axis_type)
self.style._apply_settings('chart')
self.callout = Callout(self)
self.axes = BaseAxes._get_axis_class(self._axis_type,
self._axis_type)(self)
self.plot = PlotRadar(self)
self._source = self._add_source_to_figure()
self._subtitle_glyph = self._add_subtitle_to_figure()
self.figure.toolbar.logo = None # Remove bokeh logo from toolbar.
# Reverse the order of vertical legends. Used with stacked plot types
# to ensure that the stack order is consistent with the legend order.
self._reverse_vertical_legend = False
# Logos disabled for now.
# self.logo = Logo(self)
# Set default for title
title = """ch.set_title('Takeaway')"""
if self._blank_labels:
title = ""
self.set_title(title)
class PlotRadar(BasePlot):
_X_COLUMN = '__xs'
_Y_COLUMN = '__ys'
_THETA_COLUMN = '__theta'
@staticmethod
def _get_thetas(num_vars):
thetas = np.linspace(0, 2*np.pi, num_vars, endpoint=False)
# rotate theta such that the first axis is at the top
thetas += np.pi/2
return thetas
@staticmethod
def _to_xy_coords(df, r, theta, center=0, offset=0.00):
""" Returns the x and y coordinates corresponding to the magnitudes of
each variable displayed in the radar plot
"""
# offset from center of circle
ys = (df[r] + offset) * np.sin(df[theta]) + center
xs = (df[r] + offset) * np.cos(df[theta]) + center
return pd.DataFrame({'xs': xs, 'ys': ys})
def text(self,
data_frame,
radius_column,
text_column,
color_column=None,
color_order=None,
font_size='1em',
x_offset=0,
y_offset=0,
angle=0,
text_color=None,
text_align='left'):
"""Text plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
text_column (str): Column name to plot as text labels.
color_column (str, optional): Column name to group by on the
color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
font_size (str, optional): Size of text.
x_offset (int, optional): # of pixels for horizontal text offset.
Can be negative. Default: 0.
y_offset (int, optional): # of pixels for vertical text offset.
Can be negative. Default: 0.
angle (int): Degrees from horizontal for text rotation.
text_color (str): Color name or hex value.
See chartify.color_palettes.show() for available color names.
If omitted, will default to the next color in the
current color palette.
text_align (str): 'left', 'right', or 'center'
"""
text_font = self._chart.style._get_settings('text_callout_and_plot')[
'font']
if text_color:
text_color = Color(text_color).get_hex_l()
colors, color_values = [text_color], [None]
else:
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single series
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data.copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
source = self._named_column_data_source(
coord_df, series_name=color_value)
self._chart.figure.text(
text=text_column,
x=self._X_COLUMN,
y=self._Y_COLUMN,
text_font_size=font_size,
source=source,
text_color=color,
y_offset=y_offset,
x_offset=x_offset,
angle=angle,
angle_units='deg',
text_font=text_font,
y_range_name=self._y_range_name,
text_align=text_align)
return self._chart
def perimeter(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
line_dash='solid',
line_width=4,
alpha=1.0):
"""Perimeter line plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
line_dash (str, optional): Dash style for the line. One of:
- 'solid'
- 'dashed'
- 'dotted'
- 'dotdash'
- 'dashdot'
line_width (int, optional): Width of the line
alpha (float): Alpha value.
"""
settings = self._chart.style._get_settings('line_plot')
line_cap = settings['line_cap']
line_join = settings['line_join']
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
# Add endpoint
coord_df = coord_df.append(coord_df.iloc[0])
source = self._named_column_data_source(
coord_df, series_name=color_value)
color_value = str(
color_value) if color_value is not None else color_value
self._chart.figure.line(
x=self._X_COLUMN,
y=self._Y_COLUMN,
source=source,
line_width=line_width,
color=color,
line_join=line_join,
line_cap=line_cap,
legend=color_value,
line_dash=line_dash,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
def area(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
alpha=.2):
"""Area plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
alpha (float): Alpha value.
"""
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(data_frame,
radius_column,
radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
# Add endpoint
coord_df = coord_df.append(coord_df.iloc[0])
source = self._named_column_data_source(
coord_df, series_name=color_value)
color_value = str(
color_value) if color_value is not None else color_value
self._chart.figure.patch(
x=self._X_COLUMN,
y=self._Y_COLUMN,
source=source,
color=color,
legend=color_value,
line_width=0,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
def radius(self,
data_frame,
radius_column,
color_column=None,
color_order=None,
line_dash='solid',
line_width=4,
alpha=1.0):
"""Radius line plot.
Args:
data_frame (pandas.DataFrame): Data source for the plot.
radius_column (str): Column name containing radius values.
color_column (str, optional): Column name to group by on
the color dimension.
color_order (list, optional): List of values within the
'color_column' for specific sorting of the colors.
line_dash (str, optional): Dash style for the line. One of:
- 'solid'
- 'dashed'
- 'dotted'
- 'dotdash'
- 'dashdot'
line_width (int, optional): Width of the line
alpha (float): Alpha value.
"""
settings = self._chart.style._get_settings('line_plot')
line_cap = settings['line_cap']
line_join = settings['line_join']
colors, color_values = self._get_color_and_order(
data_frame, color_column, color_order)
self._set_numeric_axis_default_format(
data_frame, radius_column, radius_column)
for color_value, color in zip(color_values, colors):
if color_column is None: # Single line
sliced_data = data_frame
else:
sliced_data = data_frame[
data_frame[color_column] == color_value]
coord_df = sliced_data[[radius_column]].copy()
coord_df[self._THETA_COLUMN] = self._get_thetas(len(coord_df))
coord_df[[self._X_COLUMN, self._Y_COLUMN]] = self._to_xy_coords(
coord_df, radius_column, self._THETA_COLUMN)
color_value = str(
color_value) if color_value is not None else color_value
for i, r in coord_df.iterrows():
self._chart.figure.line(
x=[0, r[self._X_COLUMN]],
y=[0, r[self._Y_COLUMN]],
line_width=line_width,
color=color,
line_join=line_join,
line_cap=line_cap,
legend=color_value,
line_dash=line_dash,
alpha=alpha,
y_range_name=self._y_range_name)
# Set legend defaults if there are multiple series.
if color_column is not None:
self._chart.style._apply_settings('legend')
return self._chart
| en | 0.614686 | # -*- coding: utf-8 -*- # # Copyright (c) 2017-2018 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Module for Radar Chart Create a Radar Chart instance. Note: Radar charts plot each vertex in counter-clockwise order starting from the top. Args: blank_labels (bool): When true removes the title, subtitle, axes, and source labels from the chart. Default False. layout (str): Change size & aspect ratio of the chart for fitting into slides. - 'slide_100%' - 'slide_75%' - 'slide_50%' (Suggested for Radar Charts) - 'slide_25%' # Validate axis type input # Remove bokeh logo from toolbar. # Reverse the order of vertical legends. Used with stacked plot types # to ensure that the stack order is consistent with the legend order. # Logos disabled for now. # self.logo = Logo(self) # Set default for title ch.set_title('Takeaway') # rotate theta such that the first axis is at the top Returns the x and y coordinates corresponding to the magnitudes of each variable displayed in the radar plot # offset from center of circle Text plot. Args: data_frame (pandas.DataFrame): Data source for the plot. radius_column (str): Column name containing radius values. text_column (str): Column name to plot as text labels. color_column (str, optional): Column name to group by on the color dimension. color_order (list, optional): List of values within the 'color_column' for specific sorting of the colors. font_size (str, optional): Size of text. x_offset (int, optional): # of pixels for horizontal text offset. Can be negative. Default: 0. y_offset (int, optional): # of pixels for vertical text offset. Can be negative. Default: 0. angle (int): Degrees from horizontal for text rotation. text_color (str): Color name or hex value. See chartify.color_palettes.show() for available color names. If omitted, will default to the next color in the current color palette. text_align (str): 'left', 'right', or 'center' # Single series Perimeter line plot. Args: data_frame (pandas.DataFrame): Data source for the plot. radius_column (str): Column name containing radius values. color_column (str, optional): Column name to group by on the color dimension. color_order (list, optional): List of values within the 'color_column' for specific sorting of the colors. line_dash (str, optional): Dash style for the line. One of: - 'solid' - 'dashed' - 'dotted' - 'dotdash' - 'dashdot' line_width (int, optional): Width of the line alpha (float): Alpha value. # Single line # Add endpoint # Set legend defaults if there are multiple series. Area plot. Args: data_frame (pandas.DataFrame): Data source for the plot. radius_column (str): Column name containing radius values. color_column (str, optional): Column name to group by on the color dimension. color_order (list, optional): List of values within the 'color_column' for specific sorting of the colors. alpha (float): Alpha value. # Single line # Add endpoint # Set legend defaults if there are multiple series. Radius line plot. Args: data_frame (pandas.DataFrame): Data source for the plot. 
radius_column (str): Column name containing radius values. color_column (str, optional): Column name to group by on the color dimension. color_order (list, optional): List of values within the 'color_column' for specific sorting of the colors. line_dash (str, optional): Dash style for the line. One of: - 'solid' - 'dashed' - 'dotted' - 'dotdash' - 'dashdot' line_width (int, optional): Width of the line alpha (float): Alpha value. # Single line # Set legend defaults if there are multiple series. | 2.693758 | 3 |
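PlotRadar above turns each radius into an x, y pair via _get_thetas and _to_xy_coords. A small standalone sketch of that polar-to-cartesian step for a single series (the column names and values are invented for the example):

import numpy as np
import pandas as pd

df = pd.DataFrame({"metric": ["speed", "power", "range", "cost"],
                   "r": [0.9, 0.4, 0.7, 0.5]})

# One angle per vertex, counter-clockwise, rotated so the first axis points up.
theta = np.linspace(0, 2 * np.pi, len(df), endpoint=False) + np.pi / 2

df["x"] = df["r"] * np.cos(theta)
df["y"] = df["r"] * np.sin(theta)
print(df)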
production_app.py | JoMingyu/Flask-Server-Quickstart | 122 | 6631827 | <filename>production_app.py
import os
from app import create_app
from config.app_config import ProductionLevelConfig
from config.db_config import RemoteDBConfig
if "SECRET_KEY" not in os.environ:
raise Warning("The secret key must be passed by the <SECRET_KEY> envvar.")
application = create_app(ProductionLevelConfig, RemoteDBConfig)
| <filename>production_app.py
import os
from app import create_app
from config.app_config import ProductionLevelConfig
from config.db_config import RemoteDBConfig
if "SECRET_KEY" not in os.environ:
raise Warning("The secret key must be passed by the <SECRET_KEY> envvar.")
application = create_app(ProductionLevelConfig, RemoteDBConfig)
| none | 1 | 1.796126 | 2 |
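production_app.py above is only the production entry point of an application-factory setup. A minimal sketch of the factory it calls into (Flask-style; create_app's body and the two config classes here are placeholders, not the project's real ones):

from flask import Flask


class ProductionLevelConfig:
    DEBUG = False


class RemoteDBConfig:
    SQLALCHEMY_DATABASE_URI = "postgresql://app:app@db-host/appdb"  # placeholder DSN


def create_app(app_config, db_config) -> Flask:
    app = Flask(__name__)
    app.config.from_object(app_config)
    app.config.from_object(db_config)
    return app


application = create_app(ProductionLevelConfig, RemoteDBConfig)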
|
data/GUM/CONLL-format/utils/webAnnotsv_to_conll.py | ToshihikoSakai/entity-recognition-datasets | 1,134 | 6631828 | <gh_stars>1000+
"""
This script can be used to convert Webanno .tsv files to CONLL.
For use with Python 2.
"""
import os
filename = 'GUM_whow_skittles.tsv'
# column indices that we need to know
word_ind = 2
iob_ind = 3
CONLLDIR_BASE = '../data_CONLL-format/'
rootDir_BASE = '../data_orig/gum/coref/tsv/'
def recursively():
""" This converts every tsv file in the rootDir_BASE to CONLL-format
and saves it in the CONLLDIR_BASE directory.
"""
CONLLDIR = CONLLDIR_BASE
rootDir = rootDir_BASE
#CONLLDIR is the directory where the new files should be written.
for dirName, subdirList, fileList in os.walk(rootDir):
newdir = CONLLDIR+dirName[len(rootDir):]
if not os.path.exists(newdir):
os.makedirs(newdir)
print 'Made directory: ', newdir
for fname in fileList:
writefile = os.path.join(newdir, fname)
readfile = os.path.join(dirName, fname)
# Parse the file and write to writefile
file_to_conllfile(readfile, writefile)
#print 'Wrote to: ', writefile
#print 'orig: ', readfile
def file_to_conllfile(filename, writefile):
""" Read in a tsv file, and write to a CONLL-2003 formatted file.
"""
sentences = file_to_sent_list(filename)
new_sentences = fix_sentences(sentences)
#writefile = CONLLDIR+filename
with open(writefile,'a+') as fd:
for sent in new_sentences:
fd.write('\n')
for tok in sent:
fd.write(tok[0] + '\t' + tok[1] + '\n')
print 'Done with file', filename
def file_to_sent_list(filename):
""" Returns a list of lists; each sublist contains tuples of the form
(word, pos, ent), but ent is in the CONLL-2012 format, ie it is given
by things like *, (PERSON* , (NORP) (using brackets rather than IOB
tags.
"""
with open(filename,'r') as fd:
L = fd.readlines()
# The document is split into pieces, so there are some more # in between these
# lines; filter them out.
L = [l for l in L if l[0]!='#']
sentences = []
sent = []
for l in L:
if l=='\n':
sentences.append(sent)
sent = []
else:
stripped = l.split()
word = stripped[word_ind]
iob = stripped[iob_ind]
sent.append((word, iob) )
return sentences
def fix_iob(iob):
if iob[-1]==']' and iob.find('[')>=1:
# might have |
if '|' in iob:
first = iob[:iob.find('|')]
second = iob[iob.find('|')+1:]
iob = first
LBP = iob.find('[')
if not iob[LBP+1:-1].isdigit():
print iob
raise ValueError("?")
iob = iob[: iob.find('[')]
if iob == '_':
iob = 'O'
return iob
def fix_sentences(sentences):
s2 = [ [(w,fix_iob(iob)) for (w,iob) in s] for s in sentences]
s2 = [x for x in s2 if x!=[]] # eliminate any extra empty spaces
return s2
############################################################################
| """
This script can be used to convert Webanno .tsv files to CONLL.
For use with Python 2.
"""
import os
filename = 'GUM_whow_skittles.tsv'
# column indices that we need to know
word_ind = 2
iob_ind = 3
CONLLDIR_BASE = '../data_CONLL-format/'
rootDir_BASE = '../data_orig/gum/coref/tsv/'
def recursively():
""" This converts every tsv file in the rootDir_BASE to CONLL-format
and saves it in the CONLLDIR_BASE directory.
"""
CONLLDIR = CONLLDIR_BASE
rootDir = rootDir_BASE
#CONLLDIR is the directory where the new files should be written.
for dirName, subdirList, fileList in os.walk(rootDir):
newdir = CONLLDIR+dirName[len(rootDir):]
if not os.path.exists(newdir):
os.makedirs(newdir)
print 'Made directory: ', newdir
for fname in fileList:
writefile = os.path.join(newdir, fname)
readfile = os.path.join(dirName, fname)
# Parse the file and write to writefile
file_to_conllfile(readfile, writefile)
#print 'Wrote to: ', writefile
#print 'orig: ', readfile
def file_to_conllfile(filename, writefile):
""" Read in a tsv file, and write to a CONLL-2003 formatted file.
"""
sentences = file_to_sent_list(filename)
new_sentences = fix_sentences(sentences)
#writefile = CONLLDIR+filename
with open(writefile,'a+') as fd:
for sent in new_sentences:
fd.write('\n')
for tok in sent:
fd.write(tok[0] + '\t' + tok[1] + '\n')
print 'Done with file', filename
def file_to_sent_list(filename):
""" Returns a list of lists; each sublist contains tuples of the form
(word, pos, ent), but ent is in the CONLL-2012 format, ie it is given
by things like *, (PERSON* , (NORP) (using brackets rather than IOB
tags.
"""
with open(filename,'r') as fd:
L = fd.readlines()
# The document is split into pieces, so there are some more # in between these
# lines; filter them out.
L = [l for l in L if l[0]!='#']
sentences = []
sent = []
for l in L:
if l=='\n':
sentences.append(sent)
sent = []
else:
stripped = l.split()
word = stripped[word_ind]
iob = stripped[iob_ind]
sent.append((word, iob) )
return sentences
def fix_iob(iob):
if iob[-1]==']' and iob.find('[')>=1:
# might have |
if '|' in iob:
first = iob[:iob.find('|')]
second = iob[iob.find('|')+1:]
iob = first
LBP = iob.find('[')
if not iob[LBP+1:-1].isdigit():
print iob
raise ValueError("?")
iob = iob[: iob.find('[')]
if iob == '_':
iob = 'O'
return iob
def fix_sentences(sentences):
s2 = [ [(w,fix_iob(iob)) for (w,iob) in s] for s in sentences]
s2 = [x for x in s2 if x!=[]] # eliminate any extra empty spaces
return s2
############################################################################ | en | 0.815888 | This script can be used to convert Webanno .tsv files to CONLL. For use with Python 2. # column indices that we need to know This converts every tsv file in the rootDir_BASE to CONLL-format and saves it in the CONLLDIR_BASE directory. #CONLLDIR is the directory where the new files should be written. # Parse the file and write to writefile #print 'Wrote to: ', writefile #print 'orig: ', readfile Read in a tsv file, and write to a CONLL-2003 formatted file. #writefile = CONLLDIR+filename Returns a list of lists; each sublist contains tuples of the form (word, pos, ent), but ent is in the CONLL-2012 format, ie it is given by things like *, (PERSON* , (NORP) (using brackets rather than IOB tags. # The document is split into pieces, so there are some more # in between these # lines; filter them out. # might have | # eliminate any extra empty spaces ############################################################################ | 3.227476 | 3 |
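fix_iob above strips WebAnno's bracketed chain indices (e.g. 'person[2]' or 'a[1]|b[2]' alternatives) down to a bare tag and maps '_' to 'O'. The script targets Python 2; a simplified Python 3 sketch of the same cleaning step:

def clean_tag(tag: str) -> str:
    # Keep only the first alternative when WebAnno lists several.
    tag = tag.split("|", 1)[0]
    # Drop a trailing chain index such as '[2]'.
    if tag.endswith("]") and "[" in tag:
        tag = tag[: tag.index("[")]
    # WebAnno uses '_' for "no annotation"; CONLL uses 'O'.
    return "O" if tag == "_" else tag


assert clean_tag("person[2]|object[3]") == "person"
assert clean_tag("_") == "O"
assert clean_tag("place") == "place"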
panda/python/serial.py | BoneE562/openpilot | 114 | 6631829 | <filename>panda/python/serial.py
# mimic a python serial port
class PandaSerial(object):
def __init__(self, panda, port, baud):
self.panda = panda
self.port = port
self.panda.set_uart_parity(self.port, 0)
self.panda.set_uart_baud(self.port, baud)
self.buf = b""
def read(self, l=1):
tt = self.panda.serial_read(self.port)
if len(tt) > 0:
#print "R: ", tt.encode("hex")
self.buf += tt
ret = self.buf[0:l]
self.buf = self.buf[l:]
return ret
def write(self, dat):
#print "W: ", dat.encode("hex")
#print ' pigeon_send("' + ''.join(map(lambda x: "\\x%02X" % ord(x), dat)) + '");'
if(isinstance(dat, bytes)):
return self.panda.serial_write(self.port, dat)
else:
return self.panda.serial_write(self.port, str.encode(dat))
def close(self):
pass
| <filename>panda/python/serial.py
# mimic a python serial port
class PandaSerial(object):
def __init__(self, panda, port, baud):
self.panda = panda
self.port = port
self.panda.set_uart_parity(self.port, 0)
self.panda.set_uart_baud(self.port, baud)
self.buf = b""
def read(self, l=1):
tt = self.panda.serial_read(self.port)
if len(tt) > 0:
#print "R: ", tt.encode("hex")
self.buf += tt
ret = self.buf[0:l]
self.buf = self.buf[l:]
return ret
def write(self, dat):
#print "W: ", dat.encode("hex")
#print ' pigeon_send("' + ''.join(map(lambda x: "\\x%02X" % ord(x), dat)) + '");'
if(isinstance(dat, bytes)):
return self.panda.serial_write(self.port, dat)
else:
return self.panda.serial_write(self.port, str.encode(dat))
def close(self):
pass
| en | 0.145214 | # mimic a python serial port #print "R: ", tt.encode("hex") #print "W: ", dat.encode("hex") #print ' pigeon_send("' + ''.join(map(lambda x: "\\x%02X" % ord(x), dat)) + '");' | 3.233653 | 3 |
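PandaSerial above adapts a Panda device object to a pyserial-like read/write interface. A self-contained usage sketch with a stub standing in for the hardware (FakePanda only implements the methods the wrapper calls, and PandaSerial is assumed to be the class defined above):

class FakePanda:
    """Stand-in for panda.Panda: echoes written bytes back on the next read."""

    def __init__(self):
        self._pending = b""

    def set_uart_parity(self, port, parity):
        pass

    def set_uart_baud(self, port, baud):
        pass

    def serial_read(self, port):
        data, self._pending = self._pending, b""
        return data

    def serial_write(self, port, dat):
        self._pending += dat
        return len(dat)


ser = PandaSerial(FakePanda(), port=1, baud=9600)
ser.write(b"AT\r\n")
print(ser.read(2))  # b'AT'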
task_1a_part2.py | MOLOCH-dev/EYANTRA1631_NIRIKSHAK | 0 | 6631830 | '''
*****************************************************************************************
*
* ===============================================
* Nirikshak Bot (NB) Theme (eYRC 2020-21)
* ===============================================
*
* This script is to implement Task 1A - Part 2 of Nirikshak Bot (NB) Theme (eYRC 2020-21).
*
* This software is made available on an "AS IS WHERE IS BASIS".
* Licensee/end user indemnifies and will keep e-Yantra indemnified from
* any and all claim(s) that emanate from the use of the Software or
* breach of the terms of this agreement.
*
* e-Yantra - An MHRD project under National Mission on Education using ICT (NMEICT)
*
*****************************************************************************************
'''
# Team ID: [ Team-ID ]
# Author List: [ <NAME> ]
# Filename: task_1a_part2.py
# Functions: process_video
# [ Comma separated list of functions in this file ]
# Global variables:frame_details
# [ List of global variables defined in this file ]
####################### IMPORT MODULES #######################
## You are not allowed to make any changes in this section. ##
## You have to implement this task with the three available ##
## modules for this task (numpy, opencv, os) ##
##############################################################
import cv2
import numpy as np
import os
##############################################################
# Global variable for details of frames selected in the video will be put in this dictionary, returned from process_video function
frame_details = {}
################# ADD UTILITY FUNCTIONS HERE #################
## You can define any utility functions for your code. ##
## Please add proper comments to ensure that your code is ##
## readable and easy to understand. ##
##############################################################
##############################################################
def process_video(vid_file_path, frame_list):
global frame_details
############## ADD YOUR CODE HERE ##############
framedet2 = {} #unsorted dictionary
capture = cv2.VideoCapture(vid_file_path)
frame_count = int(capture.get(cv2.CAP_PROP_FRAME_COUNT)) #total number of frames in saved video file
indexlist=[]
imagelist = []
for i in frame_list:
i=i-1
if i==0:
_, frame = capture.read() #this reads the first frame
imagelist.append(frame)
indexlist.append(i)
elif i>0 and i<frame_count:
capture.set(cv2.CAP_PROP_POS_FRAMES,int(i)) #this sets capture to frame specified in framelist
_, frame = capture.read()
imagelist.append(frame)
indexlist.append(i)
for i in range(len(indexlist)):
index = indexlist[i]
if index>0 and index<frame_count:
img = imagelist[i]
lab = cv2.cvtColor(img,cv2.COLOR_BGR2LAB)
#the LAB colourspace stands for Lightness,
#green to Magenta colour, blue to yellow colour
b = lab[:,:,2]
#the red(ball) appears lighter than most of the background as
#it is unchanged in B component(red is extreme of A component of LAB)
ret,thresh = cv2.threshold(b,240,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
#the ball becomes white
res = cv2.bitwise_xor(b,thresh)
#the red ball appears darker due to XOR operation
#which outputs dark if both input pixels are light
ret, thresh2 = cv2.threshold(res,0,255,cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)
#the ball becomes white with a less noisy background
thresh3 = cv2.bitwise_xor(b,thresh2)
#the ball's red pixels appears as nearly the darkest pixels of the image
minval,maxval,minloc,maxloc = cv2.minMaxLoc(thresh3) #minVal represents the darkest intensity pixel
ret,thresh4 = cv2.threshold(thresh3,int(minval+29),255,cv2.THRESH_BINARY_INV)
#the darkest pixels are turned to white while the rest are black,
#revealing an image with minimal noise
contours,_= cv2.findContours(thresh4.copy(),cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
#approx = cv2.approxPolyDP(cnt,0.01*cv2.arcLength(cnt,True),True)
M = cv2.moments(cnt)
area = cv2.contourArea(cnt)
if area>6000:
if M["m00"]!=0:
cx = int(M["m10"]/M["m00"])
cy = int(M["m01"]/M["m00"])
elif M["m00"]==0:
cx,cy=0,0
framedet2[index+1]=[cx,cy]
else:
            img = imagelist[i]
lab = cv2.cvtColor(img,cv2.COLOR_BGR2LAB)
b = lab[:,:,2]
ret,thresh = cv2.threshold(b,240,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
res = cv2.bitwise_xor(b,thresh)
ret, thresh2 = cv2.threshold(res,0,255,cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)
thresh3 = cv2.bitwise_xor(b,thresh2)
minval,maxval,minloc,maxloc = cv2.minMaxLoc(thresh3)
ret,thresh4 = cv2.threshold(thresh3,int(minval+29),255,cv2.THRESH_BINARY_INV)
contours,_= cv2.findContours(thresh4.copy(),cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
for cnt in contours:
#approx = cv2.approxPolyDP(cnt,0.01*cv2.arcLength(cnt,True),True)
M = cv2.moments(cnt)
area = cv2.contourArea(cnt)
if area>6000:
if M["m00"]!=0:
cx = int(M["m10"]/M["m00"])
cy = int(M["m01"]/M["m00"])
elif M["m00"]==0:
cx,cy=0,0
framedet2[index+1]=[cx,cy]
keys = sorted(framedet2) #implemented dictionary sorting as it has a lesser time complexity than list sorting
for i in range(len(keys)):
frame_details[keys[i]] = framedet2[keys[i]] #values are printed according to new keys
##################################################
return frame_details
# NOTE: YOU ARE NOT ALLOWED TO MAKE ANY CHANGE TO THIS FUNCTION
#
# Function Name: main
# Inputs: None
# Outputs: None
# Purpose: the function first takes input for selecting one of two videos available in Videos folder
#              and an input list of frame numbers for which the details are to be calculated. It runs the process_video
#              function on these two inputs as arguments.
if __name__ == '__main__':
curr_dir_path = os.getcwd()
print('Currently working in '+ curr_dir_path)
# path directory of videos in 'Videos' folder
vid_dir_path = curr_dir_path + '/Videos/'
try:
file_count = len(os.listdir(vid_dir_path))
except Exception:
print('\n[ERROR] "Videos" folder is not found in current directory.')
exit()
print('\n============================================')
print('\nSelect the video to process from the options given below:')
print('\nFor processing ballmotion.m4v from Videos folder, enter \t=> 1')
print('\nFor processing ballmotionwhite.m4v from Videos folder, enter \t=> 2')
choice = input('\n==> "1" or "2": ')
if choice == '1':
vid_name = 'ballmotion.m4v'
vid_file_path = vid_dir_path + vid_name
print('\n\tSelected video is: ballmotion.m4v')
elif choice=='2':
vid_name = 'ballmotionwhite.m4v'
vid_file_path = vid_dir_path + vid_name
print('\n\tSelected video is: ballmotionwhite.m4v')
else:
print('\n[ERROR] You did not select from available options!')
exit()
print('\n============================================')
if os.path.exists(vid_file_path):
print('\nFound ' + vid_name)
else:
print('\n[ERROR] ' + vid_name + ' file is not found. Make sure "Videos" folders has the selected file.')
exit()
print('\n============================================')
print('\nEnter list of frame(s) you want to process, (between 1 and 404) (without space & separated by comma) (for example: 33,44,95)')
frame_list = input('\nEnter list ==> ')
frame_list = list(frame_list.split(','))
try:
for i in range(len(frame_list)):
frame_list[i] = int(frame_list[i])
print('\n\tSelected frame(s) is/are: ', frame_list)
except Exception:
print('\n[ERROR] Enter list of frame(s) correctly')
exit()
print('\n============================================')
try:
        print('\nRunning process_video function on', vid_name, 'for the following frame(s):', frame_list)
frame_details = process_video(vid_file_path, frame_list)
if type(frame_details) is dict:
print(frame_details)
print('\nOutput generated. Please verify')
else:
print('\n[ERROR] process_video function returned a ' + str(type(frame_details)) + ' instead of a dictionary.\n')
exit()
except Exception:
print('\n[ERROR] process_video function is throwing an error. Please debug process_video function')
exit()
print('\n============================================')
pj.py | Iamnilaoba/pojian | 2 | 6631831 |
from exts import db,mail
import config
from apps.cms.urls import bp as cms_bp
from apps.front.urls import bp as front_bp
from apps.common.urls import bp as common_bp
from flask_wtf import CSRFProtect
from flask import Flask,send_from_directory
from flask import request,url_for
from flask import jsonify
import os
import string
import time
import hashlib
import random
import json
import re
import sys
import qiniu
from io import BytesIO
import base64
app=Flask(__name__)
app.register_blueprint(cms_bp)
app.register_blueprint(front_bp)
app.register_blueprint(common_bp)
# tell the main app to load the config and set up the database mapping
app.config.from_object(config)
CSRFProtect(app=app)
db.init_app(app=app)
mail.init_app(app)
# all of the code below handles image uploads from the UEditor rich-text editor
UEDITOR_UPLOAD_PATH = "images" # path on the local server where uploads are stored
UEDITOR_UPLOAD_TO_QINIU = False # whether to upload to Qiniu cloud storage
UEDITOR_QINIU_ACCESS_KEY = ""
UEDITOR_QINIU_SECRET_KEY = ""
UEDITOR_QINIU_BUCKET_NAME = ""
UEDITOR_QINIU_DOMAIN = "peouv6xac.bkt.clouddn.com"
@app.before_first_request
def before_first_request():
    global UEDITOR_UPLOAD_PATH
    global UEDITOR_UPLOAD_TO_QINIU
global UEDITOR_QINIU_ACCESS_KEY
global UEDITOR_QINIU_SECRET_KEY
global UEDITOR_QINIU_BUCKET_NAME
global UEDITOR_QINIU_DOMAIN
UEDITOR_UPLOAD_PATH = app.config.get("UEDITOR_UPLOAD_PATH")
if UEDITOR_UPLOAD_PATH and not os.path.exists(UEDITOR_UPLOAD_PATH):
os.mkdir(UEDITOR_UPLOAD_PATH)
UEDITOR_UPLOAD_TO_QINIU = app.config.get("UEDITOR_UPLOAD_TO_QINIU")
if UEDITOR_UPLOAD_TO_QINIU:
try:
UEDITOR_QINIU_ACCESS_KEY = app.config["UEDITOR_QINIU_ACCESS_KEY"]
UEDITOR_QINIU_SECRET_KEY = app.config["UEDITOR_QINIU_SECRET_KEY"]
UEDITOR_QINIU_BUCKET_NAME = app.config["UEDITOR_QINIU_BUCKET_NAME"]
UEDITOR_QINIU_DOMAIN = app.config["UEDITOR_QINIU_DOMAIN"]
except Exception as e:
option = e.args[0]
raise RuntimeError('请在app.config中配置%s!' % option)
csrf = app.extensions.get('csrf')
if csrf:
        csrf.exempt(upload) # exempt the upload view from CSRF protection
# generate a random file name for the uploaded file
def _random_filename(rawfilename):
letters = string.ascii_letters
random_filename = str(time.time()) + "".join(random.sample(letters,5))
filename = hashlib.md5(random_filename.encode('utf-8')).hexdigest()
subffix = os.path.splitext(rawfilename)[-1]
return filename + subffix
@app.route("/upload/",methods=['GET','POST'])
def upload():
action = request.args.get('action')
result = {}
if action == 'config':
        config_path = os.path.join(app.static_folder,'config.json')
with open(config_path,'r',encoding='utf-8') as fp:
result = json.loads(re.sub(r'\/\*.*\*\/','',fp.read()))
elif action in ['uploadimage','uploadvideo','uploadfile']:
image = request.files.get("upfile")
filename = image.filename
save_filename = _random_filename(filename)
result = {
'state': '',
'url': '',
'title': '',
'original': ''
}
if UEDITOR_UPLOAD_TO_QINIU:
if not sys.modules.get('qiniu'):
raise RuntimeError('没有导入qiniu模块!')
q = qiniu.Auth(UEDITOR_QINIU_ACCESS_KEY,UEDITOR_QINIU_SECRET_KEY)
token = q.upload_token(UEDITOR_QINIU_BUCKET_NAME)
buffer = BytesIO()
image.save(buffer)
buffer.seek(0)
ret,info = qiniu.put_data(token,save_filename,buffer.read())
if info.ok:
result['state'] = "SUCCESS"
result['url'] = "http://peouv6xac.bkt.clouddn.com/"+ret['key']
result['title'] = ret['key']
result['original'] = "http://peouv6xac.bkt.clouddn.com/"+ret['key']
else:
image.save(os.path.join(UEDITOR_UPLOAD_PATH, save_filename))
result['state'] = "SUCCESS"
result['url'] = url_for('files',filename=save_filename)
            result['title'] = save_filename
result['original'] = image.filename
elif action == 'uploadscrawl':
base64data = request.form.get("upfile")
img = base64.b64decode(base64data)
filename = _random_filename('xx.png')
filepath = os.path.join(UEDITOR_UPLOAD_PATH,filename)
with open(filepath,'wb') as fp:
fp.write(img)
result = {
"state": "SUCCESS",
"url": url_for('files',filename=filename),
"title": filename,
"original": filename
}
return jsonify(result)
@app.route('/files/<filename>/')
def files(filename):
return send_from_directory(UEDITOR_UPLOAD_PATH,filename)
# register a Jinja template filter
@app.template_filter('convert')
def converTime(t): # 2018-10-12 12:12:12
t = t.strftime("%Y-%m-%d %H:%M:%S")
t = time.strptime(t,"%Y-%m-%d %H:%M:%S")
t = time.mktime(t)
curremt_t = time.time()
r = curremt_t - t
if r < 60 :
return "1分钟之前"
elif r < 60*10:
return "10分钟之前"
elif r < 60*60:
return "1小时之前"
else:
return "1天前"
if __name__ == '__main__':
    app.run()
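# --- Added illustration (not part of pj.py): a rough client-side sketch of how the UEditor
# --- front end is expected to call the /upload/ endpoint above. Host, port and the sample
# --- file name are assumptions.
import requests

# UEditor first fetches the editor configuration ...
print(requests.get("http://127.0.0.1:5000/upload/?action=config").json())

# ... and then posts files under the "upfile" form field for image uploads.
with open("demo.jpg", "rb") as fp:
    resp = requests.post("http://127.0.0.1:5000/upload/?action=uploadimage",
                         files={"upfile": fp})
print(resp.json())  # expected keys: state, url, title, original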
tests/test_connectors_darshan.py | NERSC/pytokio | 22 | 6631832 |
#!/usr/bin/env python
"""
Test the Darshan connector
"""
import tokiotest
import tokio.connectors.darshan
def verify_darshan(darshan_data):
"""
Verify that all components of a Darshan object are defined
"""
assert darshan_data is not None
# Make sure mount table parsing works
assert 'mounts' in darshan_data
assert darshan_data['mounts']
# Make sure header parsing works
assert 'header' in darshan_data
assert darshan_data['header']
# Ensure that counters were found
assert 'counters' in darshan_data
assert darshan_data['counters']
# Ensure the POSIX module and files were found (it should always be present)
assert 'posix' in darshan_data['counters']
assert darshan_data['counters']['posix']
def verify_base_counters(darshan_data):
"""
Verify that the base counters are correctly populated
"""
# Examine the first POSIX file record containing base counters
first_base_key = None
for key in darshan_data['counters']['posix']:
if key not in ('_perf', '_total'):
print("Found first base key %s" % key)
first_base_key = key
break
assert first_base_key is not None
posix_record = darshan_data['counters']['posix'][first_base_key]
assert posix_record
# Ensure that it contains an OPENS counter
assert 'OPENS' in next(iter(posix_record.values()))
# Ensure that multiple modules were found (STDIO should always exist too)
assert 'stdio' in darshan_data['counters']
def verify_total_counters(darshan_data):
"""
Verify that the total counters are correctly populated
"""
# Ensure that the total counters were extracted
assert '_total' in darshan_data['counters']['posix']
# Ensure that it contains an OPENS counter
assert 'OPENS' in darshan_data['counters']['posix']['_total']
# Ensure that multiple modules were found (STDIO should always exist too)
assert 'stdio' in darshan_data['counters']
# Ensure that it contains an OPENS counter
assert 'OPENS' in darshan_data['counters']['stdio']['_total']
def verify_perf_counters(darshan_data):
"""
Verify that the perf counters are correctly populated
"""
# Ensure that the perf counters were extracted
assert '_perf' in darshan_data['counters']['posix']
# Look for a few important counters
assert 'total_bytes' in darshan_data['counters']['posix']['_perf']
assert 'agg_perf_by_slowest' in darshan_data['counters']['posix']['_perf']
# Make sure all counters appear in all modules
for module in darshan_data['counters']:
for counter in darshan_data['counters']['posix']['_perf']:
# the lustre module does not provide any perf information
assert module == 'lustre' or counter in darshan_data['counters'][module]['_perf']
@tokiotest.needs_darshan
def test_base():
"""
darshan_parser_base() method
"""
tokiotest.check_darshan()
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_base()
verify_darshan(darshan)
verify_base_counters(darshan)
@tokiotest.needs_darshan
def test_total():
"""
darshan_parser_total() method
"""
tokiotest.check_darshan()
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_total()
verify_darshan(darshan)
verify_total_counters(darshan)
@tokiotest.needs_darshan
def test_perf():
"""
darshan_parser_perf() method
"""
tokiotest.check_darshan()
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_perf()
verify_darshan(darshan)
verify_perf_counters(darshan)
@tokiotest.needs_darshan
def test_all():
"""
ensure that all parsers produce non-conflicting keys
"""
tokiotest.check_darshan()
# try parsing in different orders just to make sure that no method is nuking the others
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_perf()
darshan.darshan_parser_base()
darshan.darshan_parser_total()
verify_darshan(darshan)
verify_perf_counters(darshan)
verify_base_counters(darshan)
verify_total_counters(darshan)
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_base()
darshan.darshan_parser_perf()
darshan.darshan_parser_total()
verify_darshan(darshan)
verify_perf_counters(darshan)
verify_base_counters(darshan)
verify_total_counters(darshan)
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_base()
darshan.darshan_parser_total()
darshan.darshan_parser_perf()
verify_darshan(darshan)
verify_perf_counters(darshan)
verify_base_counters(darshan)
verify_total_counters(darshan)
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_LOG)
darshan.darshan_parser_perf()
darshan.darshan_parser_total()
darshan.darshan_parser_base()
verify_darshan(darshan)
verify_perf_counters(darshan)
verify_base_counters(darshan)
verify_total_counters(darshan)
def test_filename_metadata():
"""darshan.Darshan filename metadata"""
darshan = tokio.connectors.darshan.Darshan(tokiotest.SAMPLE_DARSHAN_FQLOG)
assert darshan.filename_metadata
for key, value in tokiotest.SAMPLE_DARSHAN_FQLOG_META.items():
assert key in darshan.filename_metadata
assert darshan.filename_metadata[key] == value
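# --- Added illustration (not part of the test module): standalone use of the connector that the
# --- tests above exercise. The log path is an assumption; the real checks run via pytest/nose.
import tokio.connectors.darshan

darshan = tokio.connectors.darshan.Darshan("sample.darshan")
darshan.darshan_parser_total()
# aggregate counters live under the special '_total' record, as verify_total_counters() expects
print(darshan['counters']['posix']['_total']['OPENS'])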
include/camera_driver/camera_driver.py | lanfis/Camera_Controler | 0 | 6631833 |
#!/usr/bin/env python
# license removed for brevity
import time
import os
import sys
sys.path.append(os.path.expanduser("~"))
current_folder = os.path.dirname(os.path.realpath(__file__))
include_folder = os.path.abspath(current_folder + "/../../include")
sys.path.append(current_folder)
sys.path.append(include_folder)
sys.path.append(current_folder + '/../matrix/python')
from console_format import Console_Format
OUT = Console_Format()
import rospy
from std_msgs.msg import String
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import cv2
import piggyphoto as pphoto
OUT = Console_Format()
class Camera_Driver:
##PUBLIC:
node_name = "Camera_Controler"
topic_image_pub = node_name + "/image_pub"
topic_control_data_sub = node_name + "/control_data_sub"
temp_folder = os.path.join(current_folder, "DCIM")
preview_data_name = "preview.jpg"
image_data_name = "image"
image_data_idx = 1
image_data_name_postifx = ".jpg"
time_shoot_pic = 3
flag_window = False
##PRIVATE:
camera_handler = pphoto.camera(autoInit=False)
is_init = False
is_capture = False
image = None
control_data = ""
image_folder = ""
image_name = ""
queue_size = 4
bridge = CvBridge()
image_pub_ = None
control_data_sub_ = None
def run(self):
if not self.is_capture:
self.preview()
self.image_publish()
def abilities(self):
return self.camera_handler.abilities
def download(self, dst_path=current_folder, src_folder=None, src_name=None):
src_folder = self.image_folder if src_folder is None else src_folder
        src_name = self.image_name if src_name is None else src_name
        OUT.INFO(self.node_name, "Downloading images : {} ...".format(os.path.join(src_folder, src_name)))
        self.camera_handler.download_file(src_folder, src_name, dst_path)
def preview(self, dst_path=None):
if not self.is_init:
self.init()
path = os.path.join(self.temp_folder, self.preview_data_name) if dst_path is None else dst_path
self.camera_handler.capture_preview(path)
self.image = cv2.imread(path, cv2.IMREAD_COLOR)
return path
def capture(self, download_path=None):
path = os.path.join(self.temp_folder, "{}_{}{}".format(self.image_data_name, self.image_data_idx, self.image_data_name_postifx)) if download_path is None else download_path
OUT.INFO(self.node_name, "Capturing image ...")
if download_path is None:
'''
self.image_folder, self.image_name = self.camera_handler.capture_image()
path = os.path.join(self.image_folder, self.image_name)
'''
self.camera_handler.capture_image(destpath=path)
else:
self.camera_handler.capture_image(destpath=path)
OUT.INFO(self.node_name, "Capturing image ok ! Saving data in : {}".format(path))
self.image_data_idx += 1
def exit(self):
self.camera_handler.exit()
def image_publish(self):
self.image_pub_.publish(self.bridge.cv2_to_imgmsg(self.image, "bgr8"))
if self.flag_window:
cv2.imshow(self.node_name, self.image)
cv2.waitKey(1)
def control_data_callback(self, msg):
self.control_data = msg.data
if self.control_data == "preview":
self.preview()
self.image_publish()
if self.control_data == "capture":
if self.is_capture:
return
self.is_capture = True
self.capture()
time.sleep(self.time_shoot_pic)
self.is_capture = False
def pub_init(self):
OUT.INFO(self.node_name, "Publisher {} initiating !".format(self.topic_image_pub))
self.image_pub_ = rospy.Publisher(self.topic_image_pub, Image, queue_size=self.queue_size)
def sub_init(self):
OUT.INFO(self.node_name, "Subscriber {} initiating !".format(self.topic_control_data_sub))
self.control_data_sub = rospy.Subscriber(self.topic_control_data_sub, String, self.control_data_callback)
def init(self):
if not os.path.exists(self.temp_folder):
os.makedirs(self.temp_folder)
if self.is_init:
OUT.INFO(self.node_name, "Reinitializing camera handler ...")
self.camera_handler.reinit()
else:
OUT.INFO(self.node_name, "Initializing camera handler ...")
self.camera_handler.init()
self.is_init = self.camera_handler.initialized
if self.is_init:
OUT.INFO(self.node_name, "Initializing camera handler ok !")
print(self.abilities())
else:
OUT.WARN(self.node_name, "Initializing camera handler fail !")
def __init__(self, name=None):
self.node_name = self.node_name if name is None else name
self.pub_init()
self.sub_init()
self.init()
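# --- Added illustration (not part of camera_driver.py): a minimal way to drive the class as a
# --- ROS node. The node name, loop rate and shutdown handling are assumptions, not the repo's
# --- actual launch code.
if __name__ == '__main__':
    rospy.init_node("Camera_Controler", anonymous=False)
    driver = Camera_Driver()
    rate = rospy.Rate(10)              # publish preview frames at roughly 10 Hz
    while not rospy.is_shutdown():
        driver.run()                   # grab a preview and publish it on the image topic
        rate.sleep()
    driver.exit()                      # release the gphoto2 camera handle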
libraries/alerts/api/serializers.py | cca/libraries_wagtail | 9 | 6631834 | from rest_framework import serializers
from alerts.models import Alert
class AlertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Alert
fields = ['id', 'url', 'last_published_at', 'alert_text', 'alert_link', 'alert_link_text', ]
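# --- Added illustration (not part of serializers.py): one way the serializer above could be
# --- exposed through a DRF viewset and router. The viewset, route name and URL wiring are
# --- assumptions, not code from this repository.
from rest_framework import routers, viewsets

from alerts.models import Alert
from alerts.api.serializers import AlertSerializer


class AlertViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoint listing alerts for the hyperlinked serializer."""
    queryset = Alert.objects.all()
    serializer_class = AlertSerializer


router = routers.DefaultRouter()
router.register(r'alerts', AlertViewSet)
# the project's urls.py would then include router.urls, e.g. path('api/', include(router.urls))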
compago_plugins/__init__.py | civitaslearning/compago | 0 | 6631835 | from logging_plugin import LoggingPlugin
from config_plugin import ConfigPlugin
stsc.py | miaocheng/STSC | 0 | 6631836 |
# &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
# stsc.py
#
# This python file contains the definition of self-tuning spectral clustering method.
# Reference:
# <NAME> and <NAME>, Self-Tuning Spectral Clustering, in Proc. Neural
# Information Processing Systems, Vancouver, Canada, 2004.
#
# Note: It is a trivial implementation in a literal manner, and has been put on hold for a term. Thus,
#       both its accuracy and its completeness are still unknown.
#
# <NAME>
# Email: <EMAIL>
# Date: 2021-07
# &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
import numpy as np
from numpy import linalg as la
from functools import reduce
from itertools import groupby
from scipy.optimize import minimize
from cala import *
class stsc(object):
def __init__(self, X, kwargs):
self.__X = X
self.__xDim, self.__xSam = np.shape(X)
if 'k' not in kwargs:
kwargs['k'] = 5
if 'c' not in kwargs:
kwargs['c'] = 3
if 't' not in kwargs:
kwargs['t'] = 1
if 'ctype' not in kwargs:
kwargs['ctype'] = 'stsc'
if 'atype' not in kwargs:
kwargs['atype'] = 'self'
if 'nIter' not in kwargs:
kwargs['nIter'] = 1000
if 'tol' not in kwargs:
kwargs['tol'] = 1e-6
# +++++ Parameters of STSC +++++
if 'min_Cls' not in kwargs:
kwargs['min_Cls'] = 2
if 'max_Cls' not in kwargs:
kwargs['max_Cls'] = 2
self.__k = kwargs['k']
self.__c = kwargs['c']
self.__t = kwargs['t']
        self.__ctype = kwargs['ctype']
        self.__atype = kwargs['atype']
self.__nIter = kwargs['nIter']
self.__tol = kwargs['tol']
# +++++ Parameters of STSC +++++
self.__min_Cls = kwargs['min_Cls']
self.__max_Cls = kwargs['max_Cls']
# ++++++++++ Initialization ++++++++++
self.__getS()
self.__getL()
self.__normL()
self.__cls = np.zeros((self.__c, self.__xSam))
pass
def __getS(self):
D = eudist(self.__X, self.__X, False)
if self.__atype == 'one':
tmp = - D / self.__t
elif self.__atype == 'self':
M, index = sortMat(D, 'Row', 'Ascend')
d = M[:, self.__k]
            dd = np.outer(d, d)   # local scales sigma_i * sigma_j (self-tuning affinity)
tmp = - D / dd
# ++++++++++ Exp Affinity ++++++++++
S = np.exp(tmp)
for i in range(self.__xSam):
S[i, i] = 0
N, index = sortMat(D, 'Row', 'Descend')
ind = index[:, 0:self.__k]
T = np.zeros((self.__xSam, self.__xSam))
for i in range(self.__xSam):
for j in range(self.__k):
tid = ind[i, j]
T[i, tid] = S[i, tid]
        T = T + np.transpose(T)   # symmetrise the kNN affinity graph
        T = T * 0.5
self.__S = T
return True
def __getL(self):
tmp = np.sum(self.__S, axis=1)
self.__D = np.diag(tmp)
self.__L = self.__D - self.__S
return True
def __normL(self):
d = np.diag(self.__D)
d = d ** (- 0.5)
dd = np.diag(d)
tmp = np.dot(dd, self.__S)
tmq = np.dot(tmp, dd)
self.__nL = tmq
return True
def __updMeans(self):
for i in range(self.__c):
tmp = self.__cls[i, :]
n = np.sum(tmp)
tmq = repVec(tmp, self.__xDim)
tmx = tmq * self.__X
mx = np.sum(tmx, axis=1)
mx = mx / n
self.__km[:, i] = mx
return True
def __kmeans(self, X):
xDim, xSam = np.shape(X)
assert xDim == self.__c, 'The length of feature sizes are not identical !'
# ++++++++++ Initialize the means ++++++++++
ind = np.arange(xSam)
np.random.shuffle(ind)
ind = ind[0:self.__c]
self.__km = X[:, ind]
old_cls = self.__cls
for ii in range(self.__nIter):
d = eudist(X, self.__km, False)
dd, index = sortMat(d, 'Row', 'Ascend')
ind = index[:, 0]
# ++++++++++ Aligned samples ++++++++++
self.__cls = np.zeros((self.__c, self.__xSam))
for i in range(xSam):
tid = ind[i]
self.__cls[tid, i] = 1
self.__updMeans()
# ++++++++++ Check the convergency ++++++++++
tmp = self.__cls - old_cls
tmq = tmp * tmp
Obj = norm(tmq, 1)
str_ = 'The %d' %ii + '-th iteration: %f' %Obj
print(str_)
if Obj < self.__tol:
break
old_cls = self.__cls
return Obj
def __njw(self):
U, s, V = la.svd(self.__nL, full_matrices=False)
V = np.transpose(V)
s, r = getRank(s)
# ++++++++++ Normalization ++++++++++
U = U[:, 0:r]
cc = U[:, 0:self.__c]
tmp = cc * cc
        tmq = np.sum(tmp, axis=1)
        tmq = np.sqrt(tmq)
        tm = cc / tmq[:, None]   # normalise each row of the eigenvector matrix to unit length
tm = np.transpose(tm)
self.__kmeans(tm)
return True
# ++++++++++ Self-tuning clustering ++++++++++
def __GivensRotation(self, i, j, theta, size):
g = np.eye(size)
c = np.cos(theta)
s = np.sin(theta)
g[i, i] = 0
g[j, j] = 0
g[j, i] = 0
g[i, j] = 0
ii_mat = np.zeros_like(g)
ii_mat[i, i] = 1
jj_mat = np.zeros_like(g)
jj_mat[j, j] = 1
ji_mat = np.zeros_like(g)
ji_mat[j, i] = 1
ij_mat = np.zeros_like(g)
ij_mat[i, j] = 1
return g + c * ii_mat + c * jj_mat + s * ji_mat - s * ij_mat
def __generate_list(self, ij_list, theta_list, size):
return [self.__GivensRotation(ij[0], ij[1], theta, size)
for ij, theta in zip(ij_list, theta_list)]
def __rotation(self, X, c):
ij_list = [(i, j) for i in range(c) for j in range(c) if i < j]
        def cost(theta_list):   # X, c and ij_list are captured from the enclosing scope
U_list = self.__generate_list(ij_list, theta_list, c)
R = reduce(np.dot, U_list, np.eye(c))
Z = X.dot(R)
M = np.max(Z, axis=1, keepdims=True)
N = np.sum((Z / M) ** 2)
return N
theta_list_init = np.array([0.0] * int(c * (c - 1) / 2))
        opt = minimize(cost,
                       x0 = theta_list_init,
                       method = 'CG',
                       options = {'disp': False})
return opt.fun, reduce(np.dot, self.__generate_list(ij_list, opt.x, c), np.eye(c))
    def __reformat(self, labels, n):
zipped_data = zip(labels, range(n))
zipped_data = sorted(zipped_data, key=lambda x: x[0])
grouped_feature_id = [[j[1] for j in i[1]] for i in groupby(zipped_data, lambda x: x[0])]
return grouped_feature_id
def __stsc(self):
U, s, V = la.svd(self.__nL, full_matrices=False)
V = np.transpose(V)
s, r = getRank(s)
#t = revArr(s)
ss = np.sum(s)
if ss < 2:
self.__max_Cls = 2
else:
self.__max_Cls = int(ss)
re = []
for i in range(self.__min_Cls, self.__max_Cls + 1):
tmv = U[:, :i]
cost, tmr = self.__rotation(tmv, i)
re.append((cost, tmv.dot(tmr)))
            str_ = 'n_cluster: %d' %i + '\t cost: %f' %cost
print(str_)
COST, Z = sorted(re, key = lambda x: x[0])[0]
tm = self.__reformat(np.argmax(Z, axis=1), Z.shape[0])
return tm
def Learn(self):
if self.__ctype == 'stsc':
self.__stsc()
elif self.__ctype == 'njw':
self.__njw()
return True
def getLabel(self):
B, index = iMax(self.__cls, axis=0)
labels = index
return labels
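# --- Added illustration (not part of stsc.py): a rough usage sketch. The data shape and
# --- parameter values are assumptions, and the helper module `cala` must be importable.
import numpy as np

X = np.random.rand(16, 200)                        # 16-dim features, 200 samples as columns
model = stsc(X, {'k': 7, 'c': 3, 'ctype': 'njw'})  # the 'njw' path fills the label matrix used by getLabel()
model.Learn()
labels = model.getLabel()                          # one cluster index per sample
print(labels)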
examples/ilya/tail_scaling02.py | radiasoft/rsdynfric | 0 | 6631837 | <reponame>radiasoft/rsdynfric<gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
D, F, DF = np.loadtxt("D_Fintegr_DtimesFintegr.txt", skiprows=0 , unpack=True)
n_e = 2.0e+15
dD = D[2]-D[1] #4.923725109213487e-07 # r1
#print n_e *2. *np.pi *dD *np.sum(DF)
plt.plot(D[-6:D.size], np.log( -DF[-6:D.size] ), 'ro')
plt.xlabel('D (m)')
plt.ylabel('$\ln ($|D * Line integral of $F_{\parallel} |)$')
plt.ticklabel_format(axis='both', style='sci', scilimits=(-2,2))
#plt.savefig('DtimesFintegr_tail_scaling.pdf')
plt.show()
#r = -(np.log(-DF[-1]) -np.log(-DF[-10])) / (D[-1] -D[-10])
r = -(np.log(-DF[-1]) -np.log(-DF[-6])) / (D[-1] -D[-6])
print 'r = ', r
S_corrn = n_e *2. *np.pi *dD *DF[-1] / (np.exp(r *dD) -1.)
print 'Correction to F = ', S_corrn
| import numpy as np
import matplotlib.pyplot as plt
D, F, DF = np.loadtxt("D_Fintegr_DtimesFintegr.txt", skiprows=0 , unpack=True)
n_e = 2.0e+15
dD = D[2]-D[1] #4.923725109213487e-07 # r1
#print n_e *2. *np.pi *dD *np.sum(DF)
plt.plot(D[-6:D.size], np.log( -DF[-6:D.size] ), 'ro')
plt.xlabel('D (m)')
plt.ylabel('$\ln ($|D * Line integral of $F_{\parallel} |)$')
plt.ticklabel_format(axis='both', style='sci', scilimits=(-2,2))
#plt.savefig('DtimesFintegr_tail_scaling.pdf')
plt.show()
#r = -(np.log(-DF[-1]) -np.log(-DF[-10])) / (D[-1] -D[-10])
r = -(np.log(-DF[-1]) -np.log(-DF[-6])) / (D[-1] -D[-6])
print 'r = ', r
S_corrn = n_e *2. *np.pi *dD *DF[-1] / (np.exp(r *dD) -1.)
print 'Correction to F = ', S_corrn | en | 0.289637 | #4.923725109213487e-07 # r1 #print n_e *2. *np.pi *dD *np.sum(DF) #plt.savefig('DtimesFintegr_tail_scaling.pdf') #r = -(np.log(-DF[-1]) -np.log(-DF[-10])) / (D[-1] -D[-10]) | 2.342899 | 2 |
book_api/tests/test_views_books.py | musflood/zonar-book-api | 0 | 6631838 | <reponame>musflood/zonar-book-api
"""Unit tests for the Book view functions."""
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden
from book_api.models.book import Book
from book_api.tests.conftest import FAKE
from book_api.views.books import (
_create_book, _delete_book, _list_books, _update_book, validate_user)
def test_validate_user_raises_error_for_incomplete_data(dummy_request):
"""Test that validate_user raises HTTPBadRequest for missing password."""
data = {
'email': FAKE.email()
}
with pytest.raises(HTTPBadRequest):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_email_not_in_database(dummy_request):
"""Test that validate_user raises HTTPForbidden for bad email."""
data = {
'email': FAKE.email(),
'password': 'password'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_incorrect_password(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': '<PASSWORD>'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_returns_user_matching_email(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password'
}
auth_user = validate_user(dummy_request.dbsession, data)
assert auth_user is one_user
def test_list_empty_for_user_with_no_books(dummy_request, db_session, one_user):
"""Test that list returns empty list for user with no books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert books == []
def test_create_raises_error_for_incomplete_post_data(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for missing title."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_adds_new_book_to_the_database(dummy_request, db_session, one_user):
"""Test that create adds a new Book to the database."""
db_session.add(one_user)
assert len(db_session.query(Book).all()) == 0
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
_create_book(dummy_request, one_user)
assert len(db_session.query(Book).all()) == 1
def test_create_returns_dict_with_new_book_data(dummy_request, db_session, one_user):
"""Test that create returns dict with the new Book's data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_create_creates_new_book_using_post_data(dummy_request, db_session, one_user):
"""Test that create uses POST data to create the new Book."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
for prop in ['title', 'author', 'isbn']:
assert getattr(new_book, prop) == data[prop]
assert new_book.pub_date.strftime('%m/%d/%Y') == data['pub_date']
def test_create_sets_email_user_as_owner_of_new_book(dummy_request, db_session, one_user):
"""Test that create uses email from POST data to set Book owner."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
assert one_user is new_book.user
def test_create_creates_new_book_with_none_values(dummy_request, db_session, one_user):
"""Test that create sets values to None when not given."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert res['author'] is None
assert res['isbn'] is None
assert res['pub_date'] is None
def test_list_has_all_books_for_user(dummy_request, db_session, one_user):
"""Test that list returns filled list for user with multiple books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert len(books) == len(one_user.books)
def test_list_returns_list_of_dict_with_book_data(dummy_request, db_session, one_user):
"""Test that list returns list of dict with the user Book data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
res = _list_books(dummy_request, one_user)
for book in res:
assert all(prop in book for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_update_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that update raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_update_book(dummy_request, book)
def test_update_changes_single_value_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
new_author = FAKE.name()
assert new_author != book.author
data = {
'email': one_user.email,
'password': 'password',
'author': new_author
}
dummy_request.POST = data
_update_book(dummy_request, book)
assert book.author == new_author
def test_update_changes_all_values_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) != data[prop]
dummy_request.POST = data
_update_book(dummy_request, book)
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) == data[prop]
def test_update_returns_dict_with_updated_book_data(dummy_request, db_session, one_user):
"""Test that update returns dict with the new Book's data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _update_book(dummy_request, book)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_delete_returns_nothing(dummy_request, db_session, one_user):
"""Test that delete returns None."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
res = _delete_book(dummy_request, book)
assert res is None
def test_delete_removes_book_from_database(dummy_request, db_session, one_user):
"""Test that delete removes the given book from the database."""
db_session.add(one_user)
book = db_session.query(Book).first()
book_id = book.id
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
_delete_book(dummy_request, book)
db_session.commit()
assert db_session.query(Book).get(book_id) is None
| """Unit tests for the Book view functions."""
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden
from book_api.models.book import Book
from book_api.tests.conftest import FAKE
from book_api.views.books import (
_create_book, _delete_book, _list_books, _update_book, validate_user)
def test_validate_user_raises_error_for_incomplete_data(dummy_request):
"""Test that validate_user raises HTTPBadRequest for missing password."""
data = {
'email': FAKE.email()
}
with pytest.raises(HTTPBadRequest):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_email_not_in_database(dummy_request):
"""Test that validate_user raises HTTPForbidden for bad email."""
data = {
'email': FAKE.email(),
'password': 'password'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_incorrect_password(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': '<PASSWORD>'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_returns_user_matching_email(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password'
}
auth_user = validate_user(dummy_request.dbsession, data)
assert auth_user is one_user
def test_list_empty_for_user_with_no_books(dummy_request, db_session, one_user):
"""Test that list returns empty list for user with no books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert books == []
def test_create_raises_error_for_incomplete_post_data(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for missing title."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_adds_new_book_to_the_database(dummy_request, db_session, one_user):
"""Test that create adds a new Book to the database."""
db_session.add(one_user)
assert len(db_session.query(Book).all()) == 0
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
_create_book(dummy_request, one_user)
assert len(db_session.query(Book).all()) == 1
def test_create_returns_dict_with_new_book_data(dummy_request, db_session, one_user):
"""Test that create returns dict with the new Book's data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_create_creates_new_book_using_post_data(dummy_request, db_session, one_user):
"""Test that create uses POST data to create the new Book."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
for prop in ['title', 'author', 'isbn']:
assert getattr(new_book, prop) == data[prop]
assert new_book.pub_date.strftime('%m/%d/%Y') == data['pub_date']
def test_create_sets_email_user_as_owner_of_new_book(dummy_request, db_session, one_user):
"""Test that create uses email from POST data to set Book owner."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
assert one_user is new_book.user
def test_create_creates_new_book_with_none_values(dummy_request, db_session, one_user):
"""Test that create sets values to None when not given."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert res['author'] is None
assert res['isbn'] is None
assert res['pub_date'] is None
def test_list_has_all_books_for_user(dummy_request, db_session, one_user):
"""Test that list returns filled list for user with multiple books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert len(books) == len(one_user.books)
def test_list_returns_list_of_dict_with_book_data(dummy_request, db_session, one_user):
"""Test that list returns list of dict with the user Book data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
res = _list_books(dummy_request, one_user)
for book in res:
assert all(prop in book for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_update_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that update raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_update_book(dummy_request, book)
def test_update_changes_single_value_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
new_author = FAKE.name()
assert new_author != book.author
data = {
'email': one_user.email,
'password': 'password',
'author': new_author
}
dummy_request.POST = data
_update_book(dummy_request, book)
assert book.author == new_author
def test_update_changes_all_values_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) != data[prop]
dummy_request.POST = data
_update_book(dummy_request, book)
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) == data[prop]
def test_update_returns_dict_with_updated_book_data(dummy_request, db_session, one_user):
"""Test that update returns dict with the new Book's data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _update_book(dummy_request, book)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_delete_returns_nothing(dummy_request, db_session, one_user):
"""Test that delete returns None."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
res = _delete_book(dummy_request, book)
assert res is None
def test_delete_removes_book_from_database(dummy_request, db_session, one_user):
"""Test that delete removes the given book from the database."""
db_session.add(one_user)
book = db_session.query(Book).first()
book_id = book.id
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
_delete_book(dummy_request, book)
db_session.commit()
assert db_session.query(Book).get(book_id) is None | en | 0.715065 | Unit tests for the Book view functions. Test that validate_user raises HTTPBadRequest for missing password. Test that validate_user raises HTTPForbidden for bad email. Test that validate_user raises HTTPForbidden for bad email. Test that validate_user raises HTTPForbidden for bad email. Test that list returns empty list for user with no books. Test that create raises HTTPBadRequest for missing title. Test that create raises HTTPBadRequest for incorrect date format. Test that create adds a new Book to the database. Test that create returns dict with the new Book's data. Test that create uses POST data to create the new Book. Test that create uses email from POST data to set Book owner. Test that create sets values to None when not given. Test that list returns filled list for user with multiple books. Test that list returns list of dict with the user Book data. Test that update raises HTTPBadRequest for incorrect date format. Test that update changes the values on the given book from POST data. Test that update changes the values on the given book from POST data. Test that update returns dict with the new Book's data. Test that delete returns None. Test that delete removes the given book from the database. | 2.757668 | 3 |
snoop/data/migrations/0033_auto_20200512_0911.py | liquidinvestigations/hoover-snoop2 | 0 | 6631839 | <gh_stars>0
# Generated by Django 3.0.4 on 2020-05-12 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0032_remove_ocrsource_root'),
]
operations = [
migrations.AlterField(
model_name='ocrsource',
name='name',
field=models.CharField(max_length=1024, unique=True),
),
]
| # Generated by Django 3.0.4 on 2020-05-12 09:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data', '0032_remove_ocrsource_root'),
]
operations = [
migrations.AlterField(
model_name='ocrsource',
name='name',
field=models.CharField(max_length=1024, unique=True),
),
] | en | 0.821412 | # Generated by Django 3.0.4 on 2020-05-12 09:11 | 1.582688 | 2 |
eparted/src/eparted.py | builder08/enigma2-plugins_2 | 0 | 6631840 | <gh_stars>0
# -*- coding: utf-8 -*-
# code by GeminiTeam
from __future__ import print_function
from enigma import eTimer
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.Label import Label
from Components.Pixmap import MultiPixmap
from Components.Sources.StaticText import StaticText
from Components.Sources.List import List
from Components.ActionMap import ActionMap
from Components.MenuList import MenuList
from Components.Console import Console
from Tools.Directories import pathExists, createDir
from Tools.BoundFunction import boundFunction
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import resolveFilename, SCOPE_SKIN
SkinDefaultPath = resolveFilename(SCOPE_SKIN, "skin_default/")
from Components.ConfigList import ConfigListScreen
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigInteger, ConfigYesNo, ConfigText, ConfigSelection, NoSave
config.plugins.eparted = ConfigSubsection()
from locale import _
from os import system as os_system, path as os_path, listdir
#from Plugins.Bp.geminimain.gTools import cleanexit
LIST_TYPE_DEV = 0
LIST_TYPE_PAR = 1
LIST_TYPE = 0
DEV_PATH = 1
DEV_SIZE = 2
DEV_TYPE = 3
DEV_NAME = 7
PA_NR = 1
PA_START = 2
PA_END = 3
PA_SIZE = 4
PA_FS = 5
PA_TYPE = 6
PA_NAME = 7
#-----------------------------------------------------------------------------
def getInt_epart(val):
try:
return int(float(val[0:-2]))#strip the unit suffix
except:
return 0
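# Parse the machine-readable output of 'parted -m': one entry per device, holding the device line followed by its partition lines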
def parseCmd(result):
devlist = []
try:
entry = []
addok = False
for x in result.split('\n'):
#if x=="BYT;":#start
if x.find("BYT;") >= 0:
addok = True
elif x == "":#end
if addok and len(entry):
devlist.append(entry)
addok = False
entry = []
else:
if addok and len(x) > 1 and x[len(x) - 1] == ';':
l = x.split(':')
if len(l) == 7:#Part
l.insert(0, LIST_TYPE_PAR)
l[PA_START] = getInt_epart(l[PA_START])
l[PA_END] = getInt_epart(l[PA_END])
l[PA_SIZE] = getInt_epart(l[PA_SIZE])
l[PA_NAME] = ""
if l[PA_FS].find("linux-swap") == 0:
l[PA_FS] = "linux-swap"
entry.append(l)
elif len(l) == 8:#Device
if l[0].find("/dev/mtd") < 0:
l.insert(0, LIST_TYPE_DEV)
entry.append(l)
except:
print("[eParted] <parse error>")
return []
return devlist
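# Run a shell command; on a non-zero exit status show an error MessageBox (if a session was passed)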
def myExecute(cmd, session, test=False):
if test:
from time import sleep
sleep(5)
result = 0
else:
res = os_system(cmd)
result = (res >> 8)
print("[eParted]", result, cmd)
if result != 0 and session is not None:
session.open(MessageBox, _("Error command '%s'") % cmd, MessageBox.TYPE_ERROR, timeout=8)
return result
def getMountP():
try:
mounts = open("/proc/mounts")
except IOError:
return []
lines = mounts.readlines()
mounts.close()
return lines
def ismounted(dev):
for x in getMountP():
parts = x.strip().split(" ")
if len(parts) > 1:
realpath = os_path.realpath(parts[0])
if realpath == dev:
return parts[1]
return None
rereaddevices = False
#-------------------------------------------------------------------------------------
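# Device selection screen: lists every disk reported by 'parted -m -l'; selecting one opens the partition editor (Cpart)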
class Ceparted(Screen):
skin = """<screen position="center,center" size="600,200" title="eParted v0.13">
<widget name="list" position="5,5" size="590,190" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions"],
{
"cancel": self.Exit,
"ok": self.Ok
}, -1)
self["list"] = MenuList(list=[])
self.Console = Console()
global rereaddevices
rereaddevices = True
self.__readDev()
def Ok(self):
sel = self["list"].getCurrent()
if sel and sel[1]:
global rereaddevices
rereaddevices = False
self.session.openWithCallback(self.__readDev, Cpart, sel[1])
def __readDev(self):
global rereaddevices
if rereaddevices:
self.Console.ePopen("parted -m -l", self.__FinishedConsole)
def Exit(self):
self.Console.killAll()
self.close()
#cleanexit(__name__)
def __FinishedConsole(self, result, retval, extra_args=None):
if retval == 0 and '\n' in result:
list = []
for x in parseCmd(result):
if x[0][LIST_TYPE] == LIST_TYPE_DEV:
name = x[0][DEV_NAME]
if len(name) == 0:
name = x[0][DEV_PATH]
tstr = name
tstr += " (%s - %d %s %s)" % (x[0][DEV_SIZE], len(x) - 1, _("partition(s)"), x[0][DEV_PATH])
list.append((tstr, (name, x[0][DEV_PATH], x[0][DEV_SIZE])))
self["list"].setList(list)
#-------------------------------------------------------------------------------------
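# Dialog for a new partition: asks for size and filesystem; only filesystems whose mkfs helper is installed are offered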
class AddPart(Screen, ConfigListScreen):
skin = """<screen name="AddPart" position="center,center" size="600,190" title="add Partition" >
<ePixmap pixmap="skin_default/buttons/red.png" position="5,5" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="155,5" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<widget render="Label" source="key_red" position="5,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" />
<widget render="Label" source="key_green" position="155,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" />
<widget name="config" position="5,60" size="590,120" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, maxsize, unit, countpart):
Screen.__init__(self, session)
self.session = session
self.setup_title = _("add partition")
menu = []
default = "ext3"
if pathExists("/sbin/mkfs.ext2"):
menu.append("ext2")
if pathExists("/sbin/mkfs.ext3"):
menu.append("ext3")
if pathExists("/sbin/mkfs.ext4"):
menu.append("ext4")
default = "ext4"
if pathExists("/sbin/mkfs.xfs"):
menu.append("xfs")
if pathExists("/sbin/mkswap"):
menu.append("linux-swap")
if pathExists("/sbin/mkfs.vfat"):
menu.append("fat32")
if pathExists("/usr/sbin/mkfs.msdos"):
menu.append("fat16")
config.plugins.eparted.fs = NoSave(ConfigSelection(default=default, choices=menu))
config.plugins.eparted.size = NoSave(ConfigInteger(default=maxsize, limits=[1, maxsize]))
list = []
if countpart < 4:#only 4 primary partitions are possible
list.append(getConfigListEntry(_("size in %s (max %d %s):") % (unit, maxsize, unit), config.plugins.eparted.size))
list.append(getConfigListEntry(_("filesystem:"), config.plugins.eparted.fs))
ConfigListScreen.__init__(self, list, session=session)
self["key_red"] = StaticText(_("cancel"))
self["key_green"] = StaticText(_("ok"))
self["setupActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"red": self.keyCancel,
"cancel": self.keyCancel,
"green": self.keySave,
"save": self.keySave,
"ok": self.keySave,
}, -2)
def keyCancel(self):
self.close()
def keySave(self):
if config.plugins.eparted.size.value > 0:
self.close((config.plugins.eparted.size.value, config.plugins.eparted.fs.value))
#-------------------------------------------------------------------------------------
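# Partition editor for one device: shows the current layout, lets the user delete the last partition or add one in the free space, and builds the command list to apply the changes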
class Cpart(Screen):
PA_TYPE_USE = 1
PA_TYPE_LAST = 2
PA_TYPE_FREE = 4
skin = """<screen position="center,center" size="670,200" title="eParted">
<widget source="list" render="Listbox" position="0,0" size="670,160" scrollbarMode="showOnDemand" enableWrapAround="on">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos = (0,5), size = (50, 30), font=0, flags = RT_HALIGN_LEFT, text=0),
MultiContentEntryText(pos = (60,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=1),
MultiContentEntryText(pos = (210,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=2),
MultiContentEntryText(pos = (360,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=3),
MultiContentEntryText(pos = (510,5), size = (160, 30), font=0, flags = RT_HALIGN_LEFT, text=4)
],
"fonts": [gFont("Regular", 20)],
"itemHeight": 35
}
</convert>
</widget>
<widget name="PixmapRed" position="25,170" size="15,16" pixmaps="skin_default/buttons/button_red_off.png,skin_default/buttons/button_red.png" transparent="1" alphatest="on" />
<widget name="LabelRed" position="50,160" size="150,40" font="Regular;19" valign="center" />
<widget name="PixmapGreen" position="225,170" size="15,16" pixmaps="skin_default/buttons/button_green_off.png,skin_default/buttons/button_green.png" transparent="1" alphatest="on" />
<widget name="LabelGreen" position="250,160" size="150,40" font="Regular;19" valign="center" />
<widget name="PixmapBlue" position="425,170" size="15,16" pixmaps="skin_default/buttons/button_blue_off.png,skin_default/buttons/button_blue.png" transparent="1" alphatest="on" />
<widget name="LabelBlue" position="450,160" size="150,40" font="Regular;19" valign="center" />
</screen>"""
def __init__(self, session, entry):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"cancel": self.Exit,
"green": self.KeyGreen,
"blue": self.KeyBlue,
"red": self.KeyRed
}, -1)
self["list"] = List(list=[])
self["list"].onSelectionChanged.append(self.__SetLabels)
self["PixmapRed"] = MultiPixmap()
self["PixmapGreen"] = MultiPixmap()
self["PixmapBlue"] = MultiPixmap()
self["LabelRed"] = Label()
self["LabelGreen"] = Label()
self["LabelBlue"] = Label()
self.__devpath = entry[DEV_PATH]
self.__fullsize = 0
self.__old_part_list = []
self.__new_part_list = []
self.__comlist = []
self.__unit = entry[2][len(entry[2]) - 2:]
self.Console = Console()
self.__getPartInfo()
def Exit(self):
self.Console.killAll()
self.close()
def __getPartInfo(self, val=None):
self.Console.ePopen("parted -m %s unit %s print" % (self.__devpath, self.__unit), self.__FinishedConsole)
def __Filllist(self):
list = []
index = self["list"].getIndex()
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
#print x
p0 = "%s: %s" % (_("Nr"), x[PA_NR])
p1 = "%s: %d%s" % (_("Start"), x[PA_START], self.__unit)
p2 = "%s: %d%s" % (_("End"), x[PA_END], self.__unit)
p3 = "%s: %d%s" % (_("Size"), x[PA_SIZE], self.__unit)
p4 = "%s: %s" % (_("Type"), x[PA_FS])
list.append((p0, p1, p2, p3, p4, x))
self["list"].setList(list)
self["list"].setIndex(index)
self.__createCommandList()
def __SetLabels(self):
sel = self["list"].getCurrent()
self["LabelGreen"].setText("")
self["LabelRed"].setText("")
if sel and sel[5]:
if sel[5][PA_TYPE] & self.PA_TYPE_FREE and len(self.__new_part_list) < 6:
self["PixmapGreen"].setPixmapNum(1)
self["LabelGreen"].setText(_("add"))
else:
self["PixmapGreen"].setPixmapNum(0)
if sel[5][PA_TYPE] & self.PA_TYPE_LAST and bool(sel[5][PA_TYPE] & self.PA_TYPE_FREE) == False:
self["PixmapRed"].setPixmapNum(1)
self["LabelRed"].setText(_("delete"))
else:
self["PixmapRed"].setPixmapNum(0)
def __addFreePart(self, plist, lastPartEnd):
x = [LIST_TYPE_PAR, str(len(plist)), lastPartEnd, self.__fullsize, 0, _("free"), (self.PA_TYPE_FREE | self.PA_TYPE_LAST), ";"]
plist.append(x)
def __FinishedConsole(self, result, retval, extra_args=None):
if retval == 0 and '\n' in result:
tlist = parseCmd(result)
if len(tlist):
self.__old_part_list = tlist[0][:]
self.__new_part_list = tlist[0][:]
lastPartEnd = 0
count = 2
for x in self.__old_part_list:
if x[LIST_TYPE] == LIST_TYPE_DEV:
self.__fullsize = getInt_epart(x[DEV_SIZE])
name = x[DEV_NAME]
if len(name) == 0:
name = x[DEV_PATH]
name += " (%s)" % x[DEV_SIZE]
self.setTitle(name)
else:
lastPartEnd = x[PA_END]
x[PA_TYPE] = self.PA_TYPE_USE
if count == len(self.__old_part_list):#this is the last partition
x[PA_TYPE] |= self.PA_TYPE_LAST
count += 1
if lastPartEnd < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__old_part_list, lastPartEnd)
self.__addFreePart(self.__new_part_list, lastPartEnd)
self.__Filllist()
def KeyBlue(self):
if len(self.__comlist):
self.session.openWithCallback(self.__getPartInfo, Cpartexe, self.__comlist)
def KeyRed(self):
sel = self["list"].getCurrent()
if sel and sel[1] and sel[5][PA_TYPE] & self.PA_TYPE_LAST and bool(sel[5][PA_TYPE] & self.PA_TYPE_FREE) == False:
try:
self.__new_part_list.remove(sel[5])#delete the current partition
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
if x[PA_TYPE] & self.PA_TYPE_FREE:#find the last free entry and delete it as well
self.__new_part_list.remove(x)
break
else:
x[PA_TYPE] = self.PA_TYPE_USE
lastPartEnd = 0
if len(self.__new_part_list) > 1:#set the type of the last partition and determine its end
self.__new_part_list[len(self.__new_part_list) - 1][PA_TYPE] = self.PA_TYPE_USE | self.PA_TYPE_LAST
lastPartEnd = self.__new_part_list[len(self.__new_part_list) - 1][PA_END]
if lastPartEnd < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__new_part_list, lastPartEnd)
#for x in self.__new_part_list:
# if x[LIST_TYPE]==LIST_TYPE_PAR:
# print x
except:
print("[eParted] <remove part>")
self.__Filllist()
def KeyGreen(self):
sel = self["list"].getCurrent()
if sel and sel[5] and sel[5][PA_TYPE] & self.PA_TYPE_FREE and sel[5][PA_START] < sel[5][PA_END] and len(self.__new_part_list) < 6:
self.session.openWithCallback(self.__CallbackAddPart, AddPart, sel[5][PA_END] - sel[5][PA_START], self.__unit, len(self.__new_part_list) - 1)
def __CallbackAddPart(self, val=None):
if val:
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
if x[PA_TYPE] & self.PA_TYPE_FREE:
x[PA_SIZE] = val[0]
x[PA_FS] = val[1]
x[PA_END] = x[PA_START] + x[PA_SIZE]
x[PA_TYPE] = self.PA_TYPE_USE | self.PA_TYPE_LAST
if x[PA_END] < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__new_part_list, x[PA_END])
break
else:
x[PA_TYPE] = self.PA_TYPE_USE
self.__Filllist()
def __addPart2Comlist(self, list, val, mkpart=True):
#print val
partnr = val[PA_NR]
if mkpart:
fs = val[PA_FS]
com = "parted -s -a optimal %s mkpart primary %s %s%s %s%s" % (self.__devpath, fs, val[PA_START], self.__unit, val[PA_END], self.__unit)
list.append((com, _("create partition %s") % partnr, None))
mountdev = None
if val[PA_FS] == "linux-swap":
mkfs = "/sbin/mkswap"
elif val[PA_FS] == "fat16":
mkfs = "/usr/sbin/mkfs.msdos -F 16"
elif val[PA_FS] == "fat32":
mkfs = "/sbin/mkfs.vfat"
else:
mkfs = "/sbin/mkfs." + val[PA_FS]
mountdev = self.__devpath + partnr
if val[PA_FS] == "xfs":
mkfs += " -f"
com = "%s %s%s" % (mkfs, self.__devpath, partnr)
list.append((com, _("make filesystem '%s' on partition %s (%d %s)") % (val[PA_FS], partnr, val[PA_SIZE], self.__unit), mountdev))
def __delPart2Comlist(self, list, val):
partnr = val[PA_NR]
dev = "%s%s" % (self.__devpath, partnr)
mp = ismounted(dev)
if mp is not None:
if myExecute("umount %s" % mp, self.session):
return
list.insert(0, ("parted -s -a none %s rm %s" % (self.__devpath, partnr), _("delete partition %s") % partnr, None))
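# Build the list of shell commands by diffing the old and new partition layouts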
def __createCommandList(self):
self.__comlist = []
#which partitions should be deleted
for x in range(len(self.__old_part_list)):
if self.__old_part_list[x][LIST_TYPE] == LIST_TYPE_PAR:
if bool(self.__old_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if len(self.__new_part_list) > x:
if self.__old_part_list[x][PA_SIZE] != self.__new_part_list[x][PA_SIZE]:
#print self.__old_part_list[x], self.__new_part_list[x]
self.__delPart2Comlist(self.__comlist, self.__old_part_list[x])
else:
self.__delPart2Comlist(self.__comlist, self.__old_part_list[x])
#which partitions should be created
for x in range(len(self.__new_part_list)):
if self.__new_part_list[x][LIST_TYPE] == LIST_TYPE_PAR:
if bool(self.__new_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if len(self.__old_part_list) > x and bool(self.__old_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if self.__new_part_list[x][PA_SIZE] != self.__old_part_list[x][PA_SIZE]:
#print self.__new_part_list[x], self.__old_part_list[x]
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x])
else:
if self.__new_part_list[x][PA_FS] != self.__old_part_list[x][PA_FS]:
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x], False)
else:
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x])
#for x in self.__comlist: print "[eParted] com =",x
if len(self.__comlist):
self["PixmapBlue"].setPixmapNum(1)
self["LabelBlue"].setText(_("execute"))
else:
self["PixmapBlue"].setPixmapNum(0)
self["LabelBlue"].setText("")
class Cpartexe(Screen):
skin = """<screen position="center,center" size="670,400" title=" ">
<widget source="list" render="Listbox" position="0,0" size="670,360" scrollbarMode="showOnDemand" enableWrapAround="on">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos = (40,5), size = (630, 30), font=0, flags = RT_HALIGN_LEFT, text=0),
MultiContentEntryPixmapAlphaTest(pos = (5, 5), size = (35,35), png=1),
],
"fonts": [gFont("Regular", 22)],
"itemHeight": 40
}
</convert>
</widget>
<widget name="PixmapButton" position="25,370" size="15,16" pixmaps="skin_default/buttons/button_green.png,skin_default/buttons/button_green_off.png" transparent="1" alphatest="on" />
<widget name="LabelButton" position="50,360" size="620,40" font="Regular;19" valign="center" />
</screen>"""
def __init__(self, session, comlist):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"cancel": self.Exit,
"green": self.KeyGreen,
#"red": self.KeyRed
}, -1)
self.setTitle(_("execute"))
self["PixmapButton"] = MultiPixmap()
self["LabelButton"] = Label(_("Start") + " ?")
self.mountlist = []
list = []
for x in comlist:
print(x)
list.append((x[1], None, x[0]))
if x[2] is not None:
self.mountlist.append(x[2])
self["list"] = List(list)
self.__Stimer = eTimer()
self.__Stimer.callback.append(self.__exeList)
self.__state = -1
def __getPartitionUUID(self, device):
try:
if os_path.exists("/dev/disk/by-uuid"):
for uuid in listdir("/dev/disk/by-uuid/"):
if not os_path.exists("/dev/disk/by-uuid/" + uuid):
return None
if os_path.realpath("/dev/disk/by-uuid/" + uuid) == device:
return ("/dev/disk/by-uuid/" + uuid, uuid)
else:
return (device, device[5:])
except:
print("[eParted] <error get UUID>")
return None
def __mountDevice(self):
for x in self.mountlist:
dev = self.__getPartitionUUID(x)
if dev is not None:
if os_path.exists("/media/" + dev[1]) == False:
createDir("/media/" + dev[1], True)
cmd = "mount %s /media/%s" % (dev[0], dev[1])
myExecute(cmd, None)
def Exit(self):
if self.__state < 0:
del self.__Stimer
self.__mountDevice()
self.close()
def __exeList(self):
if len(self["list"].list) > self.__state and self.__state > -1:
res = myExecute(self["list"].list[self.__state][2], self.session)
pic = "test_false.png"
if res == 0:
pic = "test_true.png"
self["list"].list[self.__state] = (self["list"].list[self.__state][0], LoadPixmap(path=SkinDefaultPath + pic), self["list"].list[self.__state][2], self["list"].list[self.__state][2])
self["list"].updateList(self["list"].list)
self["list"].setIndex(self.__state)
if res == 0:
self.__state += 1
else:
self.__state = len(self["list"].list)#on error, jump to the end of the list
self["PixmapButton"].setPixmapNum(0)
self["LabelButton"].setText(_("quit"))
self.__Stimer.start(500, True)
else:
self.__state = -2
self["PixmapButton"].setPixmapNum(0)
self["LabelButton"].setText(_("quit"))
def KeyGreen(self):
if self.__state == -1:
global rereaddevices
rereaddevices = True
self.__state += 1
self["PixmapButton"].setPixmapNum(1)
self["LabelButton"].setText(_("Please Wait"))
self["list"].setIndex(0)
self.__Stimer.start(500, True)
elif self.__state == -2:
self.Exit()
#def KeyRed(self):
# self.Exit()
| # -*- coding: utf-8 -*-
# code by GeminiTeam
from __future__ import print_function
from enigma import eTimer
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.Label import Label
from Components.Pixmap import MultiPixmap
from Components.Sources.StaticText import StaticText
from Components.Sources.List import List
from Components.ActionMap import ActionMap
from Components.MenuList import MenuList
from Components.Console import Console
from Tools.Directories import pathExists, createDir
from Tools.BoundFunction import boundFunction
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import resolveFilename, SCOPE_SKIN
SkinDefaultPath = resolveFilename(SCOPE_SKIN, "skin_default/")
from Components.ConfigList import ConfigListScreen
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigInteger, ConfigYesNo, ConfigText, ConfigSelection, NoSave
config.plugins.eparted = ConfigSubsection()
from locale import _
from os import system as os_system, path as os_path, listdir
#from Plugins.Bp.geminimain.gTools import cleanexit
LIST_TYPE_DEV = 0
LIST_TYPE_PAR = 1
LIST_TYPE = 0
DEV_PATH = 1
DEV_SIZE = 2
DEV_TYPE = 3
DEV_NAME = 7
PA_NR = 1
PA_START = 2
PA_END = 3
PA_SIZE = 4
PA_FS = 5
PA_TYPE = 6
PA_NAME = 7
#-----------------------------------------------------------------------------
def getInt_epart(val):
try:
return int(float(val[0:-2]))#strip the unit suffix
except:
return 0
def parseCmd(result):
devlist = []
try:
entry = []
addok = False
for x in result.split('\n'):
#if x=="BYT;":#start
if x.find("BYT;") >= 0:
addok = True
elif x == "":#end
if addok and len(entry):
devlist.append(entry)
addok = False
entry = []
else:
if addok and len(x) > 1 and x[len(x) - 1] == ';':
l = x.split(':')
if len(l) == 7:#Part
l.insert(0, LIST_TYPE_PAR)
l[PA_START] = getInt_epart(l[PA_START])
l[PA_END] = getInt_epart(l[PA_END])
l[PA_SIZE] = getInt_epart(l[PA_SIZE])
l[PA_NAME] = ""
if l[PA_FS].find("linux-swap") == 0:
l[PA_FS] = "linux-swap"
entry.append(l)
elif len(l) == 8:#Device
if l[0].find("/dev/mtd") < 0:
l.insert(0, LIST_TYPE_DEV)
entry.append(l)
except:
print("[eParted] <parse error>")
return []
return devlist
def myExecute(cmd, session, test=False):
if test:
from time import sleep
sleep(5)
result = 0
else:
res = os_system(cmd)
result = (res >> 8)
print("[eParted]", result, cmd)
if result != 0 and session is not None:
session.open(MessageBox, _("Error command '%s'") % cmd, MessageBox.TYPE_ERROR, timeout=8)
return result
def getMountP():
try:
mounts = open("/proc/mounts")
except IOError:
return []
lines = mounts.readlines()
mounts.close()
return lines
def ismounted(dev):
for x in getMountP():
parts = x.strip().split(" ")
if len(parts) > 1:
realpath = os_path.realpath(parts[0])
if realpath == dev:
return parts[1]
return None
rereaddevices = False
#-------------------------------------------------------------------------------------
class Ceparted(Screen):
skin = """<screen position="center,center" size="600,200" title="eParted v0.13">
<widget name="list" position="5,5" size="590,190" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions"],
{
"cancel": self.Exit,
"ok": self.Ok
}, -1)
self["list"] = MenuList(list=[])
self.Console = Console()
global rereaddevices
rereaddevices = True
self.__readDev()
def Ok(self):
sel = self["list"].getCurrent()
if sel and sel[1]:
global rereaddevices
rereaddevices = False
self.session.openWithCallback(self.__readDev, Cpart, sel[1])
def __readDev(self):
global rereaddevices
if rereaddevices:
self.Console.ePopen("parted -m -l", self.__FinishedConsole)
def Exit(self):
self.Console.killAll()
self.close()
#cleanexit(__name__)
def __FinishedConsole(self, result, retval, extra_args=None):
if retval == 0 and '\n' in result:
list = []
for x in parseCmd(result):
if x[0][LIST_TYPE] == LIST_TYPE_DEV:
name = x[0][DEV_NAME]
if len(name) == 0:
name = x[0][DEV_PATH]
tstr = name
tstr += " (%s - %d %s %s)" % (x[0][DEV_SIZE], len(x) - 1, _("partition(s)"), x[0][DEV_PATH])
list.append((tstr, (name, x[0][DEV_PATH], x[0][DEV_SIZE])))
self["list"].setList(list)
#-------------------------------------------------------------------------------------
class AddPart(Screen, ConfigListScreen):
skin = """<screen name="AddPart" position="center,center" size="600,190" title="add Partition" >
<ePixmap pixmap="skin_default/buttons/red.png" position="5,5" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="155,5" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<widget render="Label" source="key_red" position="5,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" />
<widget render="Label" source="key_green" position="155,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" />
<widget name="config" position="5,60" size="590,120" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, maxsize, unit, countpart):
Screen.__init__(self, session)
self.session = session
self.setup_title = _("add partition")
menu = []
default = "ext3"
if pathExists("/sbin/mkfs.ext2"):
menu.append("ext2")
if pathExists("/sbin/mkfs.ext3"):
menu.append("ext3")
if pathExists("/sbin/mkfs.ext4"):
menu.append("ext4")
default = "ext4"
if pathExists("/sbin/mkfs.xfs"):
menu.append("xfs")
if pathExists("/sbin/mkswap"):
menu.append("linux-swap")
if pathExists("/sbin/mkfs.vfat"):
menu.append("fat32")
if pathExists("/usr/sbin/mkfs.msdos"):
menu.append("fat16")
config.plugins.eparted.fs = NoSave(ConfigSelection(default=default, choices=menu))
config.plugins.eparted.size = NoSave(ConfigInteger(default=maxsize, limits=[1, maxsize]))
list = []
if countpart < 4:#only 4 primary partitions are possible
list.append(getConfigListEntry(_("size in %s (max %d %s):") % (unit, maxsize, unit), config.plugins.eparted.size))
list.append(getConfigListEntry(_("filesystem:"), config.plugins.eparted.fs))
ConfigListScreen.__init__(self, list, session=session)
self["key_red"] = StaticText(_("cancel"))
self["key_green"] = StaticText(_("ok"))
self["setupActions"] = ActionMap(["SetupActions", "ColorActions"],
{
"red": self.keyCancel,
"cancel": self.keyCancel,
"green": self.keySave,
"save": self.keySave,
"ok": self.keySave,
}, -2)
def keyCancel(self):
self.close()
def keySave(self):
if config.plugins.eparted.size.value > 0:
self.close((config.plugins.eparted.size.value, config.plugins.eparted.fs.value))
#-------------------------------------------------------------------------------------
class Cpart(Screen):
PA_TYPE_USE = 1
PA_TYPE_LAST = 2
PA_TYPE_FREE = 4
skin = """<screen position="center,center" size="670,200" title="eParted">
<widget source="list" render="Listbox" position="0,0" size="670,160" scrollbarMode="showOnDemand" enableWrapAround="on">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos = (0,5), size = (50, 30), font=0, flags = RT_HALIGN_LEFT, text=0),
MultiContentEntryText(pos = (60,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=1),
MultiContentEntryText(pos = (210,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=2),
MultiContentEntryText(pos = (360,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=3),
MultiContentEntryText(pos = (510,5), size = (160, 30), font=0, flags = RT_HALIGN_LEFT, text=4)
],
"fonts": [gFont("Regular", 20)],
"itemHeight": 35
}
</convert>
</widget>
<widget name="PixmapRed" position="25,170" size="15,16" pixmaps="skin_default/buttons/button_red_off.png,skin_default/buttons/button_red.png" transparent="1" alphatest="on" />
<widget name="LabelRed" position="50,160" size="150,40" font="Regular;19" valign="center" />
<widget name="PixmapGreen" position="225,170" size="15,16" pixmaps="skin_default/buttons/button_green_off.png,skin_default/buttons/button_green.png" transparent="1" alphatest="on" />
<widget name="LabelGreen" position="250,160" size="150,40" font="Regular;19" valign="center" />
<widget name="PixmapBlue" position="425,170" size="15,16" pixmaps="skin_default/buttons/button_blue_off.png,skin_default/buttons/button_blue.png" transparent="1" alphatest="on" />
<widget name="LabelBlue" position="450,160" size="150,40" font="Regular;19" valign="center" />
</screen>"""
def __init__(self, session, entry):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"cancel": self.Exit,
"green": self.KeyGreen,
"blue": self.KeyBlue,
"red": self.KeyRed
}, -1)
self["list"] = List(list=[])
self["list"].onSelectionChanged.append(self.__SetLabels)
self["PixmapRed"] = MultiPixmap()
self["PixmapGreen"] = MultiPixmap()
self["PixmapBlue"] = MultiPixmap()
self["LabelRed"] = Label()
self["LabelGreen"] = Label()
self["LabelBlue"] = Label()
self.__devpath = entry[DEV_PATH]
self.__fullsize = 0
self.__old_part_list = []
self.__new_part_list = []
self.__comlist = []
self.__unit = entry[2][len(entry[2]) - 2:]
self.Console = Console()
self.__getPartInfo()
def Exit(self):
self.Console.killAll()
self.close()
def __getPartInfo(self, val=None):
self.Console.ePopen("parted -m %s unit %s print" % (self.__devpath, self.__unit), self.__FinishedConsole)
def __Filllist(self):
list = []
index = self["list"].getIndex()
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
#print x
p0 = "%s: %s" % (_("Nr"), x[PA_NR])
p1 = "%s: %d%s" % (_("Start"), x[PA_START], self.__unit)
p2 = "%s: %d%s" % (_("End"), x[PA_END], self.__unit)
p3 = "%s: %d%s" % (_("Size"), x[PA_SIZE], self.__unit)
p4 = "%s: %s" % (_("Type"), x[PA_FS])
list.append((p0, p1, p2, p3, p4, x))
self["list"].setList(list)
self["list"].setIndex(index)
self.__createCommandList()
def __SetLabels(self):
sel = self["list"].getCurrent()
self["LabelGreen"].setText("")
self["LabelRed"].setText("")
if sel and sel[5]:
if sel[5][PA_TYPE] & self.PA_TYPE_FREE and len(self.__new_part_list) < 6:
self["PixmapGreen"].setPixmapNum(1)
self["LabelGreen"].setText(_("add"))
else:
self["PixmapGreen"].setPixmapNum(0)
if sel[5][PA_TYPE] & self.PA_TYPE_LAST and bool(sel[5][PA_TYPE] & self.PA_TYPE_FREE) == False:
self["PixmapRed"].setPixmapNum(1)
self["LabelRed"].setText(_("delete"))
else:
self["PixmapRed"].setPixmapNum(0)
def __addFreePart(self, plist, lastPartEnd):
x = [LIST_TYPE_PAR, str(len(plist)), lastPartEnd, self.__fullsize, 0, _("free"), (self.PA_TYPE_FREE | self.PA_TYPE_LAST), ";"]
plist.append(x)
def __FinishedConsole(self, result, retval, extra_args=None):
if retval == 0 and '\n' in result:
tlist = parseCmd(result)
if len(tlist):
self.__old_part_list = tlist[0][:]
self.__new_part_list = tlist[0][:]
lastPartEnd = 0
count = 2
for x in self.__old_part_list:
if x[LIST_TYPE] == LIST_TYPE_DEV:
self.__fullsize = getInt_epart(x[DEV_SIZE])
name = x[DEV_NAME]
if len(name) == 0:
name = x[DEV_PATH]
name += " (%s)" % x[DEV_SIZE]
self.setTitle(name)
else:
lastPartEnd = x[PA_END]
x[PA_TYPE] = self.PA_TYPE_USE
if count == len(self.__old_part_list):#this is the last partition
x[PA_TYPE] |= self.PA_TYPE_LAST
count += 1
if lastPartEnd < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__old_part_list, lastPartEnd)
self.__addFreePart(self.__new_part_list, lastPartEnd)
self.__Filllist()
def KeyBlue(self):
if len(self.__comlist):
self.session.openWithCallback(self.__getPartInfo, Cpartexe, self.__comlist)
def KeyRed(self):
sel = self["list"].getCurrent()
if sel and sel[1] and sel[5][PA_TYPE] & self.PA_TYPE_LAST and bool(sel[5][PA_TYPE] & self.PA_TYPE_FREE) == False:
try:
self.__new_part_list.remove(sel[5])#delete the current partition
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
if x[PA_TYPE] & self.PA_TYPE_FREE:#find the last free entry and delete it as well
self.__new_part_list.remove(x)
break
else:
x[PA_TYPE] = self.PA_TYPE_USE
lastPartEnd = 0
if len(self.__new_part_list) > 1:#set the type of the last partition and determine its end
self.__new_part_list[len(self.__new_part_list) - 1][PA_TYPE] = self.PA_TYPE_USE | self.PA_TYPE_LAST
lastPartEnd = self.__new_part_list[len(self.__new_part_list) - 1][PA_END]
if lastPartEnd < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__new_part_list, lastPartEnd)
#for x in self.__new_part_list:
# if x[LIST_TYPE]==LIST_TYPE_PAR:
# print x
except:
print("[eParted] <remove part>")
self.__Filllist()
def KeyGreen(self):
sel = self["list"].getCurrent()
if sel and sel[5] and sel[5][PA_TYPE] & self.PA_TYPE_FREE and sel[5][PA_START] < sel[5][PA_END] and len(self.__new_part_list) < 6:
self.session.openWithCallback(self.__CallbackAddPart, AddPart, sel[5][PA_END] - sel[5][PA_START], self.__unit, len(self.__new_part_list) - 1)
def __CallbackAddPart(self, val=None):
if val:
for x in self.__new_part_list:
if x[LIST_TYPE] == LIST_TYPE_PAR:
if x[PA_TYPE] & self.PA_TYPE_FREE:
x[PA_SIZE] = val[0]
x[PA_FS] = val[1]
x[PA_END] = x[PA_START] + x[PA_SIZE]
x[PA_TYPE] = self.PA_TYPE_USE | self.PA_TYPE_LAST
if x[PA_END] < self.__fullsize:#if space is still free, create a free-space entry
self.__addFreePart(self.__new_part_list, x[PA_END])
break
else:
x[PA_TYPE] = self.PA_TYPE_USE
self.__Filllist()
def __addPart2Comlist(self, list, val, mkpart=True):
#print val
partnr = val[PA_NR]
if mkpart:
fs = val[PA_FS]
com = "parted -s -a optimal %s mkpart primary %s %s%s %s%s" % (self.__devpath, fs, val[PA_START], self.__unit, val[PA_END], self.__unit)
list.append((com, _("create partition %s") % partnr, None))
mountdev = None
if val[PA_FS] == "linux-swap":
mkfs = "/sbin/mkswap"
elif val[PA_FS] == "fat16":
mkfs = "/usr/sbin/mkfs.msdos -F 16"
elif val[PA_FS] == "fat32":
mkfs = "/sbin/mkfs.vfat"
else:
mkfs = "/sbin/mkfs." + val[PA_FS]
mountdev = self.__devpath + partnr
if val[PA_FS] == "xfs":
mkfs += " -f"
com = "%s %s%s" % (mkfs, self.__devpath, partnr)
list.append((com, _("make filesystem '%s' on partition %s (%d %s)") % (val[PA_FS], partnr, val[PA_SIZE], self.__unit), mountdev))
def __delPart2Comlist(self, list, val):
partnr = val[PA_NR]
dev = "%s%s" % (self.__devpath, partnr)
mp = ismounted(dev)
if mp is not None:
if myExecute("umount %s" % mp, self.session):
return
list.insert(0, ("parted -s -a none %s rm %s" % (self.__devpath, partnr), _("delete partition %s") % partnr, None))
def __createCommandList(self):
self.__comlist = []
		# which partitions should be deleted
for x in range(len(self.__old_part_list)):
if self.__old_part_list[x][LIST_TYPE] == LIST_TYPE_PAR:
if bool(self.__old_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if len(self.__new_part_list) > x:
if self.__old_part_list[x][PA_SIZE] != self.__new_part_list[x][PA_SIZE]:
#print self.__old_part_list[x], self.__new_part_list[x]
self.__delPart2Comlist(self.__comlist, self.__old_part_list[x])
else:
self.__delPart2Comlist(self.__comlist, self.__old_part_list[x])
		# which partitions should be created
for x in range(len(self.__new_part_list)):
if self.__new_part_list[x][LIST_TYPE] == LIST_TYPE_PAR:
if bool(self.__new_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if len(self.__old_part_list) > x and bool(self.__old_part_list[x][PA_TYPE] & self.PA_TYPE_FREE) == False:
if self.__new_part_list[x][PA_SIZE] != self.__old_part_list[x][PA_SIZE]:
#print self.__new_part_list[x], self.__old_part_list[x]
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x])
else:
if self.__new_part_list[x][PA_FS] != self.__old_part_list[x][PA_FS]:
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x], False)
else:
self.__addPart2Comlist(self.__comlist, self.__new_part_list[x])
#for x in self.__comlist: print "[eParted] com =",x
if len(self.__comlist):
self["PixmapBlue"].setPixmapNum(1)
self["LabelBlue"].setText(_("execute"))
else:
self["PixmapBlue"].setPixmapNum(0)
self["LabelBlue"].setText("")
class Cpartexe(Screen):
skin = """<screen position="center,center" size="670,400" title=" ">
<widget source="list" render="Listbox" position="0,0" size="670,360" scrollbarMode="showOnDemand" enableWrapAround="on">
<convert type="TemplatedMultiContent">
{"template": [
MultiContentEntryText(pos = (40,5), size = (630, 30), font=0, flags = RT_HALIGN_LEFT, text=0),
MultiContentEntryPixmapAlphaTest(pos = (5, 5), size = (35,35), png=1),
],
"fonts": [gFont("Regular", 22)],
"itemHeight": 40
}
</convert>
</widget>
<widget name="PixmapButton" position="25,370" size="15,16" pixmaps="skin_default/buttons/button_green.png,skin_default/buttons/button_green_off.png" transparent="1" alphatest="on" />
<widget name="LabelButton" position="50,360" size="620,40" font="Regular;19" valign="center" />
</screen>"""
def __init__(self, session, comlist):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"cancel": self.Exit,
"green": self.KeyGreen,
#"red": self.KeyRed
}, -1)
self.setTitle(_("execute"))
self["PixmapButton"] = MultiPixmap()
self["LabelButton"] = Label(_("Start") + " ?")
self.mountlist = []
list = []
for x in comlist:
print(x)
list.append((x[1], None, x[0]))
if x[2] is not None:
self.mountlist.append(x[2])
self["list"] = List(list)
self.__Stimer = eTimer()
self.__Stimer.callback.append(self.__exeList)
self.__state = -1
def __getPartitionUUID(self, device):
try:
if os_path.exists("/dev/disk/by-uuid"):
for uuid in listdir("/dev/disk/by-uuid/"):
if not os_path.exists("/dev/disk/by-uuid/" + uuid):
return None
if os_path.realpath("/dev/disk/by-uuid/" + uuid) == device:
return ("/dev/disk/by-uuid/" + uuid, uuid)
else:
return (device, device[5:])
except:
print("[eParted] <error get UUID>")
return None
def __mountDevice(self):
for x in self.mountlist:
dev = self.__getPartitionUUID(x)
if dev is not None:
if os_path.exists("/media/" + dev[1]) == False:
createDir("/media/" + dev[1], True)
cmd = "mount %s /media/%s" % (dev[0], dev[1])
myExecute(cmd, None)
def Exit(self):
if self.__state < 0:
del self.__Stimer
self.__mountDevice()
self.close()
def __exeList(self):
if len(self["list"].list) > self.__state and self.__state > -1:
res = myExecute(self["list"].list[self.__state][2], self.session)
pic = "test_false.png"
if res == 0:
pic = "test_true.png"
self["list"].list[self.__state] = (self["list"].list[self.__state][0], LoadPixmap(path=SkinDefaultPath + pic), self["list"].list[self.__state][2], self["list"].list[self.__state][2])
self["list"].updateList(self["list"].list)
self["list"].setIndex(self.__state)
if res == 0:
self.__state += 1
else:
				self.__state = len(self["list"].list)  # on error, jump to the end of the list
self["PixmapButton"].setPixmapNum(0)
self["LabelButton"].setText(_("quit"))
self.__Stimer.start(500, True)
else:
self.__state = -2
self["PixmapButton"].setPixmapNum(0)
self["LabelButton"].setText(_("quit"))
def KeyGreen(self):
if self.__state == -1:
global rereaddevices
rereaddevices = True
self.__state += 1
self["PixmapButton"].setPixmapNum(1)
self["LabelButton"].setText(_("Please Wait"))
self["list"].setIndex(0)
self.__Stimer.start(500, True)
elif self.__state == -2:
self.Exit()
#def KeyRed(self):
# self.Exit() | en | 0.204013 | # -*- coding: utf-8 -*- # code by GeminiTeam #from Plugins.Bp.geminimain.gTools import cleanexit #----------------------------------------------------------------------------- #Einheit abschneiden #if x=="BYT;":#start #end #Part #Device #------------------------------------------------------------------------------------- <screen position="center,center" size="600,200" title="eParted v0.13"> <widget name="list" position="5,5" size="590,190" /> </screen> #cleanexit(__name__) #------------------------------------------------------------------------------------- <screen name="AddPart" position="center,center" size="600,190" title="add Partition" > <ePixmap pixmap="skin_default/buttons/red.png" position="5,5" zPosition="0" size="140,40" transparent="1" alphatest="on" /> <ePixmap pixmap="skin_default/buttons/green.png" position="155,5" zPosition="0" size="140,40" transparent="1" alphatest="on" /> <widget render="Label" source="key_red" position="5,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" /> <widget render="Label" source="key_green" position="155,5" size="140,40" zPosition="2" valign="center" halign="center" backgroundColor="red" font="Regular;21" transparent="1" foregroundColor="white" shadowColor="black" /> <widget name="config" position="5,60" size="590,120" scrollbarMode="showOnDemand" /> </screen> #nur 4 parts möglich bei primary #------------------------------------------------------------------------------------- <screen position="center,center" size="670,200" title="eParted"> <widget source="list" render="Listbox" position="0,0" size="670,160" scrollbarMode="showOnDemand" enableWrapAround="on"> <convert type="TemplatedMultiContent"> {"template": [ MultiContentEntryText(pos = (0,5), size = (50, 30), font=0, flags = RT_HALIGN_LEFT, text=0), MultiContentEntryText(pos = (60,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=1), MultiContentEntryText(pos = (210,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=2), MultiContentEntryText(pos = (360,5), size = (150, 30), font=0, flags = RT_HALIGN_LEFT, text=3), MultiContentEntryText(pos = (510,5), size = (160, 30), font=0, flags = RT_HALIGN_LEFT, text=4) ], "fonts": [gFont("Regular", 20)], "itemHeight": 35 } </convert> </widget> <widget name="PixmapRed" position="25,170" size="15,16" pixmaps="skin_default/buttons/button_red_off.png,skin_default/buttons/button_red.png" transparent="1" alphatest="on" /> <widget name="LabelRed" position="50,160" size="150,40" font="Regular;19" valign="center" /> <widget name="PixmapGreen" position="225,170" size="15,16" pixmaps="skin_default/buttons/button_green_off.png,skin_default/buttons/button_green.png" transparent="1" alphatest="on" /> <widget name="LabelGreen" position="250,160" size="150,40" font="Regular;19" valign="center" /> <widget name="PixmapBlue" position="425,170" size="15,16" pixmaps="skin_default/buttons/button_blue_off.png,skin_default/buttons/button_blue.png" transparent="1" alphatest="on" /> <widget name="LabelBlue" position="450,160" size="150,40" font="Regular;19" valign="center" /> </screen> #print x #is letzte part #Wenn noch Frei, Part erstellen #aktuelle part löschen #letzte Freie suchen und auch löschen #von letzter Part, TYp setzen und Ende ermitteln #Wenn noch Frei, Part erstellen #for x in self.__new_part_list: # if x[LIST_TYPE]==LIST_TYPE_PAR: # print x #Wenn noch Frei, Part erstellen #print val #welche parts sollen 
gelöscht werden #print self.__old_part_list[x], self.__new_part_list[x] #welche parts sollen erstellt werden #print self.__new_part_list[x], self.__old_part_list[x] #for x in self.__comlist: print "[eParted] com =",x <screen position="center,center" size="670,400" title=" "> <widget source="list" render="Listbox" position="0,0" size="670,360" scrollbarMode="showOnDemand" enableWrapAround="on"> <convert type="TemplatedMultiContent"> {"template": [ MultiContentEntryText(pos = (40,5), size = (630, 30), font=0, flags = RT_HALIGN_LEFT, text=0), MultiContentEntryPixmapAlphaTest(pos = (5, 5), size = (35,35), png=1), ], "fonts": [gFont("Regular", 22)], "itemHeight": 40 } </convert> </widget> <widget name="PixmapButton" position="25,370" size="15,16" pixmaps="skin_default/buttons/button_green.png,skin_default/buttons/button_green_off.png" transparent="1" alphatest="on" /> <widget name="LabelButton" position="50,360" size="620,40" font="Regular;19" valign="center" /> </screen> #"red": self.KeyRed #bei fehler ans Ende der liste #def KeyRed(self): # self.Exit() | 1.810643 | 2 |
app/views/__init__.py | FSU-ACM/Contest-Server | 8 | 6631841 | <gh_stars>1-10
# app.views
from flask import render_template
from flask.views import View
from app import app
from app.models import Team
class IndexView(View):
"""Main index view
"""
def dispatch_request(self):
return render_template('index/index.html')
class TeamListView(View):
"""View listing all registered teams
"""
def dispatch_request(self):
"""Prepares list of valid teams, counts number of participants, and
renders the template.
"""
# A team is only valid if:
# 1. teamname is a valid property (exists) and not empty
# 2. There is at least one member
# 3. It has an assigned division
teams = Team.objects.filter(
team_name__exists=True, team_name__ne="",
members__0__exists=True,
division__exists=True
)
num_members = sum([len(team.members) for team in teams])
return render_template(
'form2/allteams.html',
teams=teams,
num_members=num_members
)
@app.errorhandler(404)
def page_not_found(e):
return render_template('common/404.html'), 404
@app.errorhandler(500)
def page_error(e):
return render_template('common/500.html'), 500
from . import account
from . import admin
from . import auth
from . import register
from . import team
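# Small helper pairing a URL pattern with its class-based view.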
class Route:
def __init__(self, url, view):
self.url, self.view = url, view
routes = [
Route('/', IndexView.as_view('index')),
Route('/teams', TeamListView.as_view('teams')),
Route('/register', register.SoloRegisterView.as_view('register')),
Route('/quickregister', register.QuickRegisterView.as_view('quick_register')),
Route('/login', auth.LoginView.as_view('login')),
Route('/logout', auth.LogoutView.as_view('logout')),
Route('/reset_password', auth.ResetPasswordView.as_view('reset_password')),
Route('/account/updatepassword', auth.UpdatePasswordView.as_view('update_password')),
Route('/account', account.EditAccountView.as_view('account')),
Route('/account/team', team.TeamView.as_view('team')),
Route('/account/team/update', team.UpdateView.as_view('team_update')),
Route('/account/team/create', team.CreateView.as_view('team_create')),
Route('/account/team/add', team.AddView.as_view('team_add_member')),
Route('/account/team/leave', team.LeaveView.as_view('team_leave')),
Route('/account/team/remove', team.RemoveView.as_view('team_remove')),
Route('/admin/signin', admin.SignInView.as_view('sign_in')),
Route('/admin/signin_all', admin.SigninAllView.as_view('sign_in_all')),
]
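# Register every route with Flask; the first entry, for example, is equivalent to
# app.add_url_rule('/', view_func=IndexView.as_view('index')).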
[app.add_url_rule(route.url, view_func=route.view) for route in routes]
| # app.views
from flask import render_template
from flask.views import View
from app import app
from app.models import Team
class IndexView(View):
"""Main index view
"""
def dispatch_request(self):
return render_template('index/index.html')
class TeamListView(View):
"""View listing all registered teams
"""
def dispatch_request(self):
"""Prepares list of valid teams, counts number of participants, and
renders the template.
"""
# A team is only valid if:
# 1. teamname is a valid property (exists) and not empty
# 2. There is at least one member
# 3. It has an assigned division
teams = Team.objects.filter(
team_name__exists=True, team_name__ne="",
members__0__exists=True,
division__exists=True
)
num_members = sum([len(team.members) for team in teams])
return render_template(
'form2/allteams.html',
teams=teams,
num_members=num_members
)
@app.errorhandler(404)
def page_not_found(e):
return render_template('common/404.html'), 404
@app.errorhandler(500)
def page_error(e):
return render_template('common/500.html'), 500
from . import account
from . import admin
from . import auth
from . import register
from . import team
class Route:
def __init__(self, url, view):
self.url, self.view = url, view
routes = [
Route('/', IndexView.as_view('index')),
Route('/teams', TeamListView.as_view('teams')),
Route('/register', register.SoloRegisterView.as_view('register')),
Route('/quickregister', register.QuickRegisterView.as_view('quick_register')),
Route('/login', auth.LoginView.as_view('login')),
Route('/logout', auth.LogoutView.as_view('logout')),
Route('/reset_password', auth.ResetPasswordView.as_view('reset_password')),
Route('/account/updatepassword', auth.UpdatePasswordView.as_view('update_password')),
Route('/account', account.EditAccountView.as_view('account')),
Route('/account/team', team.TeamView.as_view('team')),
Route('/account/team/update', team.UpdateView.as_view('team_update')),
Route('/account/team/create', team.CreateView.as_view('team_create')),
Route('/account/team/add', team.AddView.as_view('team_add_member')),
Route('/account/team/leave', team.LeaveView.as_view('team_leave')),
Route('/account/team/remove', team.RemoveView.as_view('team_remove')),
Route('/admin/signin', admin.SignInView.as_view('sign_in')),
Route('/admin/signin_all', admin.SigninAllView.as_view('sign_in_all')),
]
[app.add_url_rule(route.url, view_func=route.view) for route in routes] | en | 0.887906 | # app.views Main index view View listing all registered teams Prepares list of valid teams, counts number of participants, and renders the template. # A team is only valid if: # 1. teamname is a valid property (exists) and not empty # 2. There is at least one member # 3. It has an assigned division | 2.652263 | 3 |
Strings/929. Unique Email Addresses.py | thewires2/Leetcode | 1 | 6631842 | from typing import List
class Solution:
def numUniqueEmails(self, emails: List[str]) -> int:
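        # Normalize each address: drop everything after '+' and remove '.' in the local
        # part, keep the domain unchanged, then count the distinct results.
        # Worked example: "test.email+alex@leetcode.com" and "test.e.mail+bob@leetcode.com"
        # both normalize to "testemail@leetcode.com", so together with
        # "testemail+david@lee.tcode.com" the answer is 2.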
d=[]
for i in emails:
x,y=i.split("@")
if "+" in x:
x=x[:x.index("+")]
x=x.replace(".","")
d.append(x+"@"+y)
return len(set(d))
 | from typing import List
class Solution:
def numUniqueEmails(self, emails: List[str]) -> int:
d=[]
for i in emails:
x,y=i.split("@")
if "+" in x:
x=x[:x.index("+")]
x=x.replace(".","")
d.append(x+"@"+y)
return len(set(d))
| none | 1 | 3.057019 | 3 |
|
cogs/commands/welcome.py | noahdm4321/GWU-Bot | 0 | 6631843 | import random
from discord.ext import commands
from database import data
class WelcomeMessage(commands.Cog, name='Welcome Message'):
def __init__(self, client):
self.client = client
self.channel = client.get_guild(879009710701957130).get_channel(879009714069962843)
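        # Cache the welcome channel; the guild and channel IDs are specific to this server.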
print('welcome online!')
## Sends user welcome message ##
@commands.command()
async def welcome(self, ctx):
for member in ctx.message.mentions:
first = [f'<@!{member.id}>, welcome to Guild Wars 2 University!',
f'Welcome <@!{member.id}> to Guild Wars 2 University!',
f'Welcome <@!{member.id}>!',
f'Welcome to Guild Wars 2 University, <@!{member.id}>!']
last = ['Feel free to introduce yourself.',
'Feel free to introduce yourself here if you want.',
'Please introduce yourself here.',
'Please introduce yourself here if you want.',
'What is your favorite class in the game?',
'How long have you been playing Guild Wars 2?',
'Are you excited for the new expansion, End of Dragons?',
'Do you have a favorite gamemode or playstyle?',
                'It is recommended to change your Discord nickname to your Gw2 account name, so that people can identify you in-game.']
await self.channel.send(f'{random.choice(first)} {random.choice(last)}')
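        # Refresh the standing info message: delete the previously stored one, send a new
        # copy so it remains the most recent post, and remember its id for next time.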
id = data.read('id', 'welcome')
message = await self.channel.fetch_message(id)
await message.delete()
        message = await self.channel.send("New to Guild Wars 2? Check out our <#797318280826191922>. New to Discord? Check out our <#875232316010668082>.\nIf you still have questions about Guild Wars 2 or Discord, you can ask in <#736254186353721454>.\nWe have events scheduled in our <#852189307031781426>. Be sure you have the correct <#735989626455457894> to attend them.")
data.write('id', 'welcome', message.id)
def setup(client):
client.add_cog(WelcomeMessage(client)) | import random
from discord.ext import commands
from database import data
class WelcomeMessage(commands.Cog, name='Welcome Message'):
def __init__(self, client):
self.client = client
self.channel = client.get_guild(879009710701957130).get_channel(879009714069962843)
print('welcome online!')
## Sends user welcome message ##
@commands.command()
async def welcome(self, ctx):
for member in ctx.message.mentions:
first = [f'<@!{member.id}>, welcome to Guild Wars 2 University!',
f'Welcome <@!{member.id}> to Guild Wars 2 University!',
f'Welcome <@!{member.id}>!',
f'Welcome to Guild Wars 2 University, <@!{member.id}>!']
last = ['Feel free to introduce yourself.',
'Feel free to introduce yourself here if you want.',
'Please introduce yourself here.',
'Please introduce yourself here if you want.',
'What is your favorite class in the game?',
'How long have you been playing Guild Wars 2?',
'Are you excited for the new expansion, End of Dragons?',
'Do you have a favorite gamemode or playstyle?',
                'It is recommended to change your Discord nickname to your Gw2 account name, so that people can identify you in-game.']
await self.channel.send(f'{random.choice(first)} {random.choice(last)}')
id = data.read('id', 'welcome')
message = await self.channel.fetch_message(id)
await message.delete()
        message = await self.channel.send("New to Guild Wars 2? Check out our <#797318280826191922>. New to Discord? Check out our <#875232316010668082>.\nIf you still have questions about Guild Wars 2 or Discord, you can ask in <#736254186353721454>.\nWe have events scheduled in our <#852189307031781426>. Be sure you have the correct <#735989626455457894> to attend them.")
data.write('id', 'welcome', message.id)
def setup(client):
client.add_cog(WelcomeMessage(client)) | en | 0.755084 | ## Sends user welcome message ## #797318280826191922>. New to Discord? Check out our <#875232316010668082>.\nIf you still have questions about the Guid Wars 2 or Discord, you can ask in <#736254186353721454>.\nWe have events scheduled in our <#852189307031781426>. Be sure to you have the correct <#735989626455457894> to attend them.") | 3.212066 | 3 |
azurelinuxagent/ga/update.py | TeamDev-it/WALinuxAgent | 0 | 6631844 | # Windows Azure Linux Agent
#
# Copyright 2018 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import glob
import json
import os
import random
import re
import shutil
import signal
import stat
import subprocess
import sys
import time
import traceback
import uuid
import zipfile
from datetime import datetime, timedelta
import azurelinuxagent.common.conf as conf
import azurelinuxagent.common.logger as logger
import azurelinuxagent.common.utils.fileutil as fileutil
import azurelinuxagent.common.utils.restutil as restutil
import azurelinuxagent.common.utils.textutil as textutil
from azurelinuxagent.common.cgroupapi import CGroupsApi
from azurelinuxagent.common.cgroupconfigurator import CGroupConfigurator
from azurelinuxagent.common.event import add_event, initialize_event_logger_vminfo_common_parameters, \
elapsed_milliseconds, WALAEventOperation, EVENTS_DIRECTORY
from azurelinuxagent.common.exception import ResourceGoneError, UpdateError
from azurelinuxagent.common.future import ustr
from azurelinuxagent.common.osutil import get_osutil
from azurelinuxagent.common.protocol.util import get_protocol_util
from azurelinuxagent.common.protocol.hostplugin import HostPluginProtocol
from azurelinuxagent.common.utils.flexible_version import FlexibleVersion
from azurelinuxagent.common.version import AGENT_NAME, AGENT_VERSION, AGENT_DIR_PATTERN, CURRENT_AGENT,\
CURRENT_VERSION, DISTRO_NAME, DISTRO_VERSION, is_current_agent_installed, get_lis_version, \
has_logrotate, PY_VERSION_MAJOR, PY_VERSION_MINOR, PY_VERSION_MICRO
from azurelinuxagent.ga.collect_logs import get_collect_logs_handler, is_log_collection_allowed
from azurelinuxagent.ga.env import get_env_handler
from azurelinuxagent.ga.collect_telemetry_events import get_collect_telemetry_events_handler
from azurelinuxagent.ga.exthandlers import HandlerManifest, get_traceback, ExtHandlersHandler, list_agent_lib_directory, \
is_extension_telemetry_pipeline_enabled
from azurelinuxagent.ga.monitor import get_monitor_handler
# pylint: disable=C0302
from azurelinuxagent.ga.send_telemetry_events import get_send_telemetry_events_handler
AGENT_ERROR_FILE = "error.json" # File name for agent error record
AGENT_MANIFEST_FILE = "HandlerManifest.json"
AGENT_PARTITION_FILE = "partition"
CHILD_HEALTH_INTERVAL = 15 * 60
CHILD_LAUNCH_INTERVAL = 5 * 60
CHILD_LAUNCH_RESTART_MAX = 3
CHILD_POLL_INTERVAL = 60
MAX_FAILURE = 3 # Max failure allowed for agent before blacklisted
GOAL_STATE_INTERVAL_DISABLED = 5 * 60
ORPHAN_POLL_INTERVAL = 3
ORPHAN_WAIT_INTERVAL = 15 * 60
AGENT_SENTINEL_FILE = "current_version"
READONLY_FILE_GLOBS = [
"*.crt",
"*.p7m",
"*.pem",
"*.prv",
"ovf-env.xml"
]
def get_update_handler():
return UpdateHandler()
class UpdateHandler(object): # pylint: disable=R0902
TELEMETRY_HEARTBEAT_PERIOD = timedelta(minutes=30)
def __init__(self):
self.osutil = get_osutil()
self.protocol_util = get_protocol_util()
self.running = True
self.last_attempt_time = None
self.agents = []
self.child_agent = None
self.child_launch_time = None
self.child_launch_attempts = 0
self.child_process = None
self.signal_handler = None
self._last_telemetry_heartbeat = None
self._heartbeat_id = str(uuid.uuid4()).upper()
self._heartbeat_counter = 0
self._heartbeat_update_goal_state_error_count = 0
def run_latest(self, child_args=None): # pylint: disable=R0912,R1711
"""
This method is called from the daemon to find and launch the most
current, downloaded agent.
Note:
- Most events should be tagged to the launched agent (agent_version)
"""
if self.child_process is not None:
raise Exception("Illegal attempt to launch multiple goal state Agent processes")
if self.signal_handler is None:
self.signal_handler = signal.signal(signal.SIGTERM, self.forward_signal)
latest_agent = self.get_latest_agent()
if latest_agent is None:
logger.info(u"Installed Agent {0} is the most current agent", CURRENT_AGENT)
agent_cmd = "python -u {0} -run-exthandlers".format(sys.argv[0])
agent_dir = os.getcwd()
agent_name = CURRENT_AGENT
agent_version = CURRENT_VERSION
else:
logger.info(u"Determined Agent {0} to be the latest agent", latest_agent.name)
agent_cmd = latest_agent.get_agent_cmd()
agent_dir = latest_agent.get_agent_dir()
agent_name = latest_agent.name
agent_version = latest_agent.version
if child_args is not None:
agent_cmd = "{0} {1}".format(agent_cmd, child_args)
try:
# Launch the correct Python version for python-based agents
cmds = textutil.safe_shlex_split(agent_cmd)
if cmds[0].lower() == "python":
cmds[0] = sys.executable
agent_cmd = " ".join(cmds)
self._evaluate_agent_health(latest_agent)
self.child_process = subprocess.Popen(
cmds,
cwd=agent_dir,
stdout=sys.stdout,
stderr=sys.stderr,
env=os.environ)
logger.verbose(u"Agent {0} launched with command '{1}'", agent_name, agent_cmd)
# Setting the poll interval to poll every second to reduce the agent provisioning time;
# The daemon shouldn't wait for 60secs before starting the ext-handler in case the
# ext-handler kills itself during agent-update during the first 15 mins (CHILD_HEALTH_INTERVAL)
poll_interval = 1
ret = None
start_time = time.time()
while (time.time() - start_time) < CHILD_HEALTH_INTERVAL:
time.sleep(poll_interval)
try:
ret = self.child_process.poll()
except OSError:
# if child_process has terminated, calling poll could raise an exception
ret = -1
if ret is not None:
break
if ret is None or ret <= 0:
msg = u"Agent {0} launched with command '{1}' is successfully running".format(
agent_name,
agent_cmd)
logger.info(msg)
add_event(
AGENT_NAME,
version=agent_version,
op=WALAEventOperation.Enable,
is_success=True,
message=msg,
log_event=False)
if ret is None:
ret = self.child_process.wait()
else:
msg = u"Agent {0} launched with command '{1}' failed with return code: {2}".format(
agent_name,
agent_cmd,
ret)
logger.warn(msg)
add_event(
AGENT_NAME,
version=agent_version,
op=WALAEventOperation.Enable,
is_success=False,
message=msg)
if ret is not None and ret > 0:
msg = u"Agent {0} launched with command '{1}' returned code: {2}".format(
agent_name,
agent_cmd,
ret)
logger.warn(msg)
if latest_agent is not None:
latest_agent.mark_failure(is_fatal=True)
except Exception as e: # pylint: disable=C0103
# Ignore child errors during termination
if self.running:
msg = u"Agent {0} launched with command '{1}' failed with exception: {2}".format(
agent_name,
agent_cmd,
ustr(e))
logger.warn(msg)
detailed_message = '{0} {1}'.format(msg, traceback.format_exc())
add_event(
AGENT_NAME,
version=agent_version,
op=WALAEventOperation.Enable,
is_success=False,
message=detailed_message)
if latest_agent is not None:
latest_agent.mark_failure(is_fatal=True)
self.child_process = None
return
def run(self, debug=False): # pylint: disable=R0912
"""
This is the main loop which watches for agent and extension updates.
"""
try:
logger.info(u"Agent {0} is running as the goal state agent", CURRENT_AGENT)
#
# Fetch the goal state one time; some components depend on information provided by the goal state and this
# call ensures the required info is initialized (e.g telemetry depends on the container ID.)
#
protocol = self.protocol_util.get_protocol()
protocol.update_goal_state()
# Initialize the common parameters for telemetry events
initialize_event_logger_vminfo_common_parameters(protocol)
# Log OS-specific info.
os_info_msg = u"Distro: {dist_name}-{dist_ver}; "\
u"OSUtil: {util_name}; AgentService: {service_name}; "\
u"Python: {py_major}.{py_minor}.{py_micro}; "\
u"systemd: {systemd}; "\
u"LISDrivers: {lis_ver}; "\
u"logrotate: {has_logrotate};".format(
dist_name=DISTRO_NAME, dist_ver=DISTRO_VERSION,
util_name=type(self.osutil).__name__,
service_name=self.osutil.service_name,
py_major=PY_VERSION_MAJOR, py_minor=PY_VERSION_MINOR,
py_micro=PY_VERSION_MICRO, systemd=CGroupsApi.is_systemd(),
lis_ver=get_lis_version(), has_logrotate=has_logrotate()
)
logger.info(os_info_msg)
add_event(AGENT_NAME, op=WALAEventOperation.OSInfo, message=os_info_msg)
#
# Perform initialization tasks
#
from azurelinuxagent.ga.exthandlers import get_exthandlers_handler, migrate_handler_state
exthandlers_handler = get_exthandlers_handler(protocol)
migrate_handler_state()
from azurelinuxagent.ga.remoteaccess import get_remote_access_handler
remote_access_handler = get_remote_access_handler(protocol)
self._ensure_no_orphans()
self._emit_restart_event()
self._emit_changes_in_default_configuration()
self._ensure_partition_assigned()
self._ensure_readonly_files()
self._ensure_cgroups_initialized()
self._ensure_extension_telemetry_state_configured_properly(protocol)
# Get all thread handlers
telemetry_handler = get_send_telemetry_events_handler(self.protocol_util)
all_thread_handlers = [
get_monitor_handler(),
get_env_handler(),
telemetry_handler,
get_collect_telemetry_events_handler(telemetry_handler)
]
if is_log_collection_allowed():
all_thread_handlers.append(get_collect_logs_handler())
# Launch all monitoring threads
for thread_handler in all_thread_handlers:
thread_handler.run()
goal_state_interval = conf.get_goal_state_period() if conf.get_extensions_enabled() else GOAL_STATE_INTERVAL_DISABLED
while self.running:
#
# Check that the parent process (the agent's daemon) is still running
#
if not debug and self._is_orphaned:
logger.info("Agent {0} is an orphan -- exiting", CURRENT_AGENT)
break
#
# Check that all the threads are still running
#
for thread_handler in all_thread_handlers:
if not thread_handler.is_alive():
logger.warn("{0} thread died, restarting".format(thread_handler.get_thread_name()))
thread_handler.start()
#
# Process the goal state
#
if not protocol.try_update_goal_state():
self._heartbeat_update_goal_state_error_count += 1
else:
if self._upgrade_available(protocol):
available_agent = self.get_latest_agent()
if available_agent is None:
logger.info(
"Agent {0} is reverting to the installed agent -- exiting",
CURRENT_AGENT)
else:
logger.info(
u"Agent {0} discovered update {1} -- exiting",
CURRENT_AGENT,
available_agent.name)
break
utc_start = datetime.utcnow()
last_etag = exthandlers_handler.last_etag
exthandlers_handler.run()
remote_access_handler.run()
if last_etag != exthandlers_handler.last_etag:
self._ensure_readonly_files()
duration = elapsed_milliseconds(utc_start)
activity_id, correlation_id, gs_creation_time = exthandlers_handler.get_goal_state_debug_metadata()
msg = 'ProcessGoalState completed [Incarnation: {0}; {1} ms; Activity Id: {2}; Correlation Id: {3}; GS Creation Time: {4}]'.format(
exthandlers_handler.last_etag, duration, activity_id, correlation_id, gs_creation_time)
logger.info(msg)
add_event(
AGENT_NAME,
op=WALAEventOperation.ProcessGoalState,
duration=duration,
message=msg)
self._send_heartbeat_telemetry(protocol)
time.sleep(goal_state_interval)
except Exception as error:
msg = u"Agent {0} failed with exception: {1}".format(CURRENT_AGENT, ustr(error))
self._set_sentinel(msg=msg)
logger.warn(msg)
logger.warn(traceback.format_exc())
sys.exit(1)
# additional return here because sys.exit is mocked in unit tests
return
self._shutdown()
sys.exit(0)
def forward_signal(self, signum, frame):
if signum == signal.SIGTERM:
self._shutdown()
if self.child_process is None:
return
logger.info(
u"Agent {0} forwarding signal {1} to {2}",
CURRENT_AGENT,
signum,
self.child_agent.name if self.child_agent is not None else CURRENT_AGENT)
self.child_process.send_signal(signum)
if self.signal_handler not in (None, signal.SIG_IGN, signal.SIG_DFL):
self.signal_handler(signum, frame)
elif self.signal_handler is signal.SIG_DFL:
if signum == signal.SIGTERM:
self._shutdown()
sys.exit(0)
return
def get_latest_agent(self):
"""
If autoupdate is enabled, return the most current, downloaded,
non-blacklisted agent which is not the current version (if any).
Otherwise, return None (implying to use the installed agent).
"""
if not conf.get_autoupdate_enabled():
return None
self._find_agents()
available_agents = [agent for agent in self.agents
if agent.is_available
and agent.version > FlexibleVersion(AGENT_VERSION)]
return available_agents[0] if len(available_agents) >= 1 else None
def _emit_restart_event(self): # pylint: disable=R1711
try:
if not self._is_clean_start:
msg = u"Agent did not terminate cleanly: {0}".format(
fileutil.read_file(self._sentinel_file_path()))
logger.info(msg)
add_event(
AGENT_NAME,
version=CURRENT_VERSION,
op=WALAEventOperation.Restart,
is_success=False,
message=msg)
except Exception:
pass
return
@staticmethod
def _emit_changes_in_default_configuration():
try:
def log_if_int_changed_from_default(name, current):
default = conf.get_int_default_value(name)
if default != current:
msg = "{0} changed from its default; new value: {1}".format(name, current)
logger.info(msg)
add_event(AGENT_NAME, op=WALAEventOperation.ConfigurationChange, message=msg)
log_if_int_changed_from_default("Extensions.GoalStatePeriod", conf.get_goal_state_period())
if not conf.enable_firewall():
message = "OS.EnableFirewall is False"
logger.info(message)
add_event(AGENT_NAME, op=WALAEventOperation.ConfigurationChange, message=message)
else:
log_if_int_changed_from_default("OS.EnableFirewallPeriod", conf.get_enable_firewall_period())
if conf.get_lib_dir() != "/var/lib/waagent":
message = "lib dir is in an unexpected location: {0}".format(conf.get_lib_dir())
logger.info(message)
add_event(AGENT_NAME, op=WALAEventOperation.ConfigurationChange, message=message)
except Exception as e: # pylint: disable=C0103
logger.warn("Failed to log changes in configuration: {0}", ustr(e))
def _ensure_no_orphans(self, orphan_wait_interval=ORPHAN_WAIT_INTERVAL): # pylint: disable=R1711
pid_files, ignored = self._write_pid_file() # pylint: disable=W0612
for pid_file in pid_files:
try:
pid = fileutil.read_file(pid_file)
wait_interval = orphan_wait_interval
while self.osutil.check_pid_alive(pid):
wait_interval -= ORPHAN_POLL_INTERVAL
if wait_interval <= 0:
logger.warn(
u"{0} forcibly terminated orphan process {1}",
CURRENT_AGENT,
pid)
os.kill(pid, signal.SIGKILL)
break
logger.info(
u"{0} waiting for orphan process {1} to terminate",
CURRENT_AGENT,
pid)
time.sleep(ORPHAN_POLL_INTERVAL)
os.remove(pid_file)
except Exception as e: # pylint: disable=C0103
logger.warn(
u"Exception occurred waiting for orphan agent to terminate: {0}",
ustr(e))
return
def _ensure_partition_assigned(self):
"""
Assign the VM to a partition (0 - 99). Downloaded updates may be configured
to run on only some VMs; the assigned partition determines eligibility.
"""
if not os.path.exists(self._partition_file):
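            # Derive a pseudo-random partition number in [0, 99] from the current microsecond.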
partition = ustr(int(datetime.utcnow().microsecond / 10000))
fileutil.write_file(self._partition_file, partition)
add_event(
AGENT_NAME,
version=CURRENT_VERSION,
op=WALAEventOperation.Partition,
is_success=True,
message=partition)
def _ensure_readonly_files(self):
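        # Restrict certificates, private keys and ovf-env.xml to owner read-only.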
for g in READONLY_FILE_GLOBS: # pylint: disable=C0103
for path in glob.iglob(os.path.join(conf.get_lib_dir(), g)):
os.chmod(path, stat.S_IRUSR)
def _ensure_cgroups_initialized(self):
configurator = CGroupConfigurator.get_instance()
configurator.initialize()
def _evaluate_agent_health(self, latest_agent):
"""
Evaluate the health of the selected agent: If it is restarting
too frequently, raise an Exception to force blacklisting.
"""
if latest_agent is None:
self.child_agent = None
return
if self.child_agent is None or latest_agent.version != self.child_agent.version:
self.child_agent = latest_agent
self.child_launch_time = None
self.child_launch_attempts = 0
if self.child_launch_time is None:
self.child_launch_time = time.time()
self.child_launch_attempts += 1
if (time.time() - self.child_launch_time) <= CHILD_LAUNCH_INTERVAL \
and self.child_launch_attempts >= CHILD_LAUNCH_RESTART_MAX:
msg = u"Agent {0} restarted more than {1} times in {2} seconds".format(
self.child_agent.name,
CHILD_LAUNCH_RESTART_MAX,
CHILD_LAUNCH_INTERVAL)
raise Exception(msg)
return
def _filter_blacklisted_agents(self):
self.agents = [agent for agent in self.agents if not agent.is_blacklisted]
def _find_agents(self): # pylint: disable=R1711
"""
Load all non-blacklisted agents currently on disk.
"""
try:
self._set_agents(self._load_agents())
self._filter_blacklisted_agents()
except Exception as e: # pylint: disable=C0103
logger.warn(u"Exception occurred loading available agents: {0}", ustr(e))
return
def _get_host_plugin(self, protocol):
return protocol.client.get_host_plugin() if protocol and protocol.client else None
def _get_pid_parts(self):
pid_file = conf.get_agent_pid_file_path()
pid_dir = os.path.dirname(pid_file)
pid_name = os.path.basename(pid_file)
pid_re = re.compile("(\d+)_{0}".format(re.escape(pid_name))) # pylint: disable=W1401
return pid_dir, pid_name, pid_re
def _get_pid_files(self):
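        # Pid files are named '<index>_<pid file name>'; return them sorted by index, oldest first.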
pid_dir, pid_name, pid_re = self._get_pid_parts() # pylint: disable=W0612
pid_files = [os.path.join(pid_dir, f) for f in os.listdir(pid_dir) if pid_re.match(f)]
pid_files.sort(key=lambda f: int(pid_re.match(os.path.basename(f)).group(1)))
return pid_files
@property
def _is_clean_start(self):
return not os.path.isfile(self._sentinel_file_path())
@property
def _is_orphaned(self):
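        # Orphaned when re-parented to init, or when the daemon pid file is missing or no longer matches the parent pid.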
parent_pid = os.getppid()
if parent_pid in (1, None):
return True
if not os.path.isfile(conf.get_agent_pid_file_path()):
return True
return fileutil.read_file(conf.get_agent_pid_file_path()) != ustr(parent_pid)
def _is_version_eligible(self, version):
# Ensure the installed version is always eligible
if version == CURRENT_VERSION and is_current_agent_installed():
return True
for agent in self.agents:
if agent.version == version:
return agent.is_available
return False
def _load_agents(self):
path = os.path.join(conf.get_lib_dir(), "{0}-*".format(AGENT_NAME))
return [GuestAgent(path=agent_dir)
for agent_dir in glob.iglob(path) if os.path.isdir(agent_dir)]
def _partition(self):
return int(fileutil.read_file(self._partition_file))
@property
def _partition_file(self):
return os.path.join(conf.get_lib_dir(), AGENT_PARTITION_FILE)
def _purge_agents(self): # pylint: disable=R1711
"""
Remove from disk all directories and .zip files of unknown agents
(without removing the current, running agent).
"""
path = os.path.join(conf.get_lib_dir(), "{0}-*".format(AGENT_NAME))
known_versions = [agent.version for agent in self.agents]
if CURRENT_VERSION not in known_versions:
logger.verbose(
u"Running Agent {0} was not found in the agent manifest - adding to list",
CURRENT_VERSION)
known_versions.append(CURRENT_VERSION)
for agent_path in glob.iglob(path):
try:
name = fileutil.trim_ext(agent_path, "zip")
m = AGENT_DIR_PATTERN.match(name) # pylint: disable=C0103
if m is not None and FlexibleVersion(m.group(1)) not in known_versions:
if os.path.isfile(agent_path):
logger.info(u"Purging outdated Agent file {0}", agent_path)
os.remove(agent_path)
else:
logger.info(u"Purging outdated Agent directory {0}", agent_path)
shutil.rmtree(agent_path)
except Exception as e: # pylint: disable=C0103
logger.warn(u"Purging {0} raised exception: {1}", agent_path, ustr(e))
return
def _set_agents(self, agents=None): # pylint: disable=R1711
if agents is None:
agents = []
self.agents = agents
self.agents.sort(key=lambda agent: agent.version, reverse=True)
return
def _set_sentinel(self, agent=CURRENT_AGENT, msg="Unknown cause"): # pylint: disable=R1711
try:
fileutil.write_file(
self._sentinel_file_path(),
"[{0}] [{1}]".format(agent, msg))
except Exception as e: # pylint: disable=C0103
logger.warn(
u"Exception writing sentinel file {0}: {1}",
self._sentinel_file_path(),
str(e))
return
def _sentinel_file_path(self):
return os.path.join(conf.get_lib_dir(), AGENT_SENTINEL_FILE)
def _shutdown(self):
# Todo: Ensure all threads stopped when shutting down the main extension handler to ensure that the state of
# all threads is clean.
self.running = False
if not os.path.isfile(self._sentinel_file_path()):
return
try:
os.remove(self._sentinel_file_path())
except Exception as e: # pylint: disable=C0103
logger.warn(
u"Exception removing sentinel file {0}: {1}",
self._sentinel_file_path(),
str(e))
return
def _upgrade_available(self, protocol, base_version=CURRENT_VERSION):
# Ignore new agents if updating is disabled
if not conf.get_autoupdate_enabled():
return False
now = time.time()
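        # Throttle update checks to the configured autoupdate frequency.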
if self.last_attempt_time is not None:
next_attempt_time = self.last_attempt_time + \
conf.get_autoupdate_frequency()
else:
next_attempt_time = now
if next_attempt_time > now:
return False
family = conf.get_autoupdate_gafamily()
logger.info("Checking for agent updates (family: {0})", family)
self.last_attempt_time = now
try:
manifest_list, etag = protocol.get_vmagent_manifests()
manifests = [m for m in manifest_list.vmAgentManifests \
if m.family == family and len(m.versionsManifestUris) > 0]
if len(manifests) == 0: # pylint: disable=len-as-condition
logger.verbose(u"Incarnation {0} has no {1} agent updates",
etag, family)
return False
pkg_list = protocol.get_vmagent_pkgs(manifests[0])
# Set the agents to those available for download at least as
# current as the existing agent and remove from disk any agent
# no longer reported to the VM.
# Note:
# The code leaves on disk available, but blacklisted, agents
# so as to preserve the state. Otherwise, those agents could be
# again downloaded and inappropriately retried.
host = self._get_host_plugin(protocol=protocol)
self._set_agents([GuestAgent(pkg=pkg, host=host) for pkg in pkg_list.versions])
self._purge_agents()
self._filter_blacklisted_agents()
# Return True if current agent is no longer available or an
# agent with a higher version number is available
return not self._is_version_eligible(base_version) \
or (len(self.agents) > 0 and self.agents[0].version > base_version)
except Exception as e: # pylint: disable=W0612,C0103
msg = u"Exception retrieving agent manifests: {0}".format(ustr(traceback.format_exc()))
add_event(AGENT_NAME, op=WALAEventOperation.Download, version=CURRENT_VERSION, is_success=False,
message=msg)
return False
def _write_pid_file(self):
pid_files = self._get_pid_files()
pid_dir, pid_name, pid_re = self._get_pid_parts()
previous_pid_file = None if len(pid_files) <= 0 else pid_files[-1] # pylint: disable=len-as-condition
pid_index = -1 \
if previous_pid_file is None \
else int(pid_re.match(os.path.basename(previous_pid_file)).group(1))
pid_file = os.path.join(pid_dir, "{0}_{1}".format(pid_index + 1, pid_name))
try:
fileutil.write_file(pid_file, ustr(os.getpid()))
logger.info(u"{0} running as process {1}", CURRENT_AGENT, ustr(os.getpid()))
        except Exception as e: # pylint: disable=C0103
            logger.warn(
                u"Exception writing goal state agent {0} pid to {1}: {2}",
                CURRENT_AGENT,
                pid_file,
                ustr(e))
            pid_file = None
        return pid_files, pid_file
def _send_heartbeat_telemetry(self, protocol):
if self._last_telemetry_heartbeat is None:
self._last_telemetry_heartbeat = datetime.utcnow() - UpdateHandler.TELEMETRY_HEARTBEAT_PERIOD
if datetime.utcnow() >= (self._last_telemetry_heartbeat + UpdateHandler.TELEMETRY_HEARTBEAT_PERIOD):
dropped_packets = self.osutil.get_firewall_dropped_packets(protocol.get_endpoint())
auto_update_enabled = 1 if conf.get_autoupdate_enabled() else 0
telemetry_msg = "{0};{1};{2};{3};{4}".format(self._heartbeat_counter, self._heartbeat_id, dropped_packets,
self._heartbeat_update_goal_state_error_count,
auto_update_enabled)
debug_log_msg = "[DEBUG HeartbeatCounter: {0};HeartbeatId: {1};DroppedPackets: {2};" \
"UpdateGSErrors: {3};AutoUpdate: {4}]".format(self._heartbeat_counter,
self._heartbeat_id, dropped_packets,
self._heartbeat_update_goal_state_error_count,
auto_update_enabled)
# Write Heartbeat events/logs
add_event(name=AGENT_NAME, version=CURRENT_VERSION, op=WALAEventOperation.HeartBeat, is_success=True,
message=telemetry_msg, log_event=False)
logger.info(u"[HEARTBEAT] Agent {0} is running as the goal state agent {1}", CURRENT_AGENT, debug_log_msg)
# Update/Reset the counters
self._heartbeat_counter += 1
self._heartbeat_update_goal_state_error_count = 0
self._last_telemetry_heartbeat = datetime.utcnow()
@staticmethod
def _ensure_extension_telemetry_state_configured_properly(protocol):
for name, path in list_agent_lib_directory(skip_agent_package=True):
try:
handler_instance = ExtHandlersHandler.get_ext_handler_instance_from_path(name=name,
path=path,
protocol=protocol)
except Exception:
# Ignore errors if any
continue
try:
if handler_instance is not None:
# Recreate the HandlerEnvironment for existing extensions on startup.
# This is to ensure that existing extensions can start using the telemetry pipeline if they support
# it and also ensures that the extensions are not sending out telemetry if the Agent has to disable the feature.
handler_instance.create_handler_env()
except Exception as e: # pylint: disable=C0103
logger.warn(
"Unable to re-create HandlerEnvironment file on service startup. Error: {0}".format(ustr(e)))
continue
try:
if not is_extension_telemetry_pipeline_enabled():
# If extension telemetry pipeline is disabled, ensure we delete all existing extension events directory
# because the agent will not be listening on those events.
extension_event_dirs = glob.glob(os.path.join(conf.get_ext_log_dir(), "*", EVENTS_DIRECTORY))
for ext_dir in extension_event_dirs:
shutil.rmtree(ext_dir, ignore_errors=True)
except Exception as e: # pylint: disable=C0103
logger.warn("Error when trying to delete existing Extension events directory. Error: {0}".format(ustr(e)))
class GuestAgent(object):
def __init__(self, path=None, pkg=None, host=None):
self.pkg = pkg
self.host = host
version = None
if path is not None:
m = AGENT_DIR_PATTERN.match(path) # pylint: disable=C0103
            if m is None:
raise UpdateError(u"Illegal agent directory: {0}".format(path))
version = m.group(1)
elif self.pkg is not None:
version = pkg.version
        if version is None:
raise UpdateError(u"Illegal agent version: {0}".format(version))
self.version = FlexibleVersion(version)
location = u"disk" if path is not None else u"package"
logger.verbose(u"Loading Agent {0} from {1}", self.name, location)
self.error = GuestAgentError(self.get_agent_error_file())
self.error.load()
try:
self._ensure_downloaded()
self._ensure_loaded()
except Exception as e: # pylint: disable=C0103
if isinstance(e, ResourceGoneError):
raise
# The agent was improperly blacklisting versions due to a timeout
# encountered while downloading a later version. Errors of type
# socket.error are IOError, so this should provide sufficient
# protection against a large class of I/O operation failures.
if isinstance(e, IOError):
raise
# Note the failure, blacklist the agent if the package downloaded
# - An exception with a downloaded package indicates the package
# is corrupt (e.g., missing the HandlerManifest.json file)
self.mark_failure(is_fatal=os.path.isfile(self.get_agent_pkg_path()))
msg = u"Agent {0} install failed with exception: {1}".format(
self.name, ustr(e))
detailed_msg = '{0} {1}'.format(msg, traceback.extract_tb(get_traceback(e)))
add_event(
AGENT_NAME,
version=self.version,
op=WALAEventOperation.Install,
is_success=False,
message=detailed_msg)
@property
def name(self):
return "{0}-{1}".format(AGENT_NAME, self.version)
def get_agent_cmd(self):
return self.manifest.get_enable_command()
def get_agent_dir(self):
return os.path.join(conf.get_lib_dir(), self.name)
def get_agent_error_file(self):
return os.path.join(conf.get_lib_dir(), self.name, AGENT_ERROR_FILE)
def get_agent_manifest_path(self):
return os.path.join(self.get_agent_dir(), AGENT_MANIFEST_FILE)
def get_agent_pkg_path(self):
return ".".join((os.path.join(conf.get_lib_dir(), self.name), "zip"))
def clear_error(self):
self.error.clear()
self.error.save()
@property
def is_available(self):
return self.is_downloaded and not self.is_blacklisted
@property
def is_blacklisted(self):
return self.error is not None and self.error.is_blacklisted
@property
def is_downloaded(self):
return self.is_blacklisted or \
os.path.isfile(self.get_agent_manifest_path())
def mark_failure(self, is_fatal=False):
try:
if not os.path.isdir(self.get_agent_dir()):
os.makedirs(self.get_agent_dir())
self.error.mark_failure(is_fatal=is_fatal)
self.error.save()
if self.error.is_blacklisted:
logger.warn(u"Agent {0} is permanently blacklisted", self.name)
except Exception as e: # pylint: disable=C0103
logger.warn(u"Agent {0} failed recording error state: {1}", self.name, ustr(e))
def _ensure_downloaded(self):
logger.verbose(u"Ensuring Agent {0} is downloaded", self.name)
if self.is_downloaded:
logger.verbose(u"Agent {0} was previously downloaded - skipping download", self.name)
return
if self.pkg is None:
raise UpdateError(u"Agent {0} is missing package and download URIs".format(
self.name))
self._download()
self._unpack()
msg = u"Agent {0} downloaded successfully".format(self.name)
logger.verbose(msg)
add_event(
AGENT_NAME,
version=self.version,
op=WALAEventOperation.Install,
is_success=True,
message=msg)
def _ensure_loaded(self):
self._load_manifest()
self._load_error()
def _download(self):
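        # Try each package URI directly first; fall back to the HostGAPlugin channel when direct downloads fail.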
uris_shuffled = self.pkg.uris
random.shuffle(uris_shuffled)
for uri in uris_shuffled:
if not HostPluginProtocol.is_default_channel and self._fetch(uri.uri): # pylint: disable=R1723
break
elif self.host is not None and self.host.ensure_initialized():
if not HostPluginProtocol.is_default_channel:
logger.warn("Download failed, switching to host plugin")
else:
logger.verbose("Using host plugin as default channel")
uri, headers = self.host.get_artifact_request(uri.uri, self.host.manifest_uri)
try:
if self._fetch(uri, headers=headers, use_proxy=False): # pylint: disable=R1723
if not HostPluginProtocol.is_default_channel:
logger.verbose("Setting host plugin as default channel")
HostPluginProtocol.is_default_channel = True
break
else:
logger.warn("Host plugin download failed")
# If the HostPlugin rejects the request,
# let the error continue, but set to use the HostPlugin
except ResourceGoneError:
HostPluginProtocol.is_default_channel = True
raise
else:
logger.error("No download channels available")
if not os.path.isfile(self.get_agent_pkg_path()):
msg = u"Unable to download Agent {0} from any URI".format(self.name)
add_event(
AGENT_NAME,
op=WALAEventOperation.Download,
version=CURRENT_VERSION,
is_success=False,
message=msg)
raise UpdateError(msg)
def _fetch(self, uri, headers=None, use_proxy=True):
package = None
try:
is_healthy = True
error_response = ''
resp = restutil.http_get(uri, use_proxy=use_proxy, headers=headers)
if restutil.request_succeeded(resp):
package = resp.read()
fileutil.write_file(self.get_agent_pkg_path(),
bytearray(package),
asbin=True)
logger.verbose(u"Agent {0} downloaded from {1}", self.name, uri)
else:
error_response = restutil.read_response_error(resp)
logger.verbose("Fetch was unsuccessful [{0}]", error_response)
is_healthy = not restutil.request_failed_at_hostplugin(resp)
if self.host is not None:
self.host.report_fetch_health(uri, is_healthy, source='GuestAgent', response=error_response)
except restutil.HttpError as http_error:
if isinstance(http_error, ResourceGoneError):
raise
logger.verbose(u"Agent {0} download from {1} failed [{2}]",
self.name,
uri,
http_error)
return package is not None
def _load_error(self):
try:
self.error = GuestAgentError(self.get_agent_error_file())
self.error.load()
logger.verbose(u"Agent {0} error state: {1}", self.name, ustr(self.error))
except Exception as e: # pylint: disable=C0103
logger.warn(u"Agent {0} failed loading error state: {1}", self.name, ustr(e))
def _load_manifest(self): # pylint: disable=R1711
path = self.get_agent_manifest_path()
if not os.path.isfile(path):
msg = u"Agent {0} is missing the {1} file".format(self.name, AGENT_MANIFEST_FILE)
raise UpdateError(msg)
with open(path, "r") as manifest_file:
try:
manifests = json.load(manifest_file)
except Exception as e: # pylint: disable=C0103
msg = u"Agent {0} has a malformed {1}".format(self.name, AGENT_MANIFEST_FILE)
raise UpdateError(msg)
if type(manifests) is list: # pylint: disable=C0123
if len(manifests) <= 0: # pylint: disable=len-as-condition
msg = u"Agent {0} has an empty {1}".format(self.name, AGENT_MANIFEST_FILE)
raise UpdateError(msg)
manifest = manifests[0]
else:
manifest = manifests
try:
self.manifest = HandlerManifest(manifest) # pylint: disable=W0201
if len(self.manifest.get_enable_command()) <= 0: # pylint: disable=len-as-condition
raise Exception(u"Manifest is missing the enable command")
except Exception as e: # pylint: disable=C0103
msg = u"Agent {0} has an illegal {1}: {2}".format(
self.name,
AGENT_MANIFEST_FILE,
ustr(e))
raise UpdateError(msg)
logger.verbose(
u"Agent {0} loaded manifest from {1}",
self.name,
self.get_agent_manifest_path())
logger.verbose(u"Successfully loaded Agent {0} {1}: {2}",
self.name,
AGENT_MANIFEST_FILE,
ustr(self.manifest.data))
return
def _unpack(self): # pylint: disable=R1711
try:
if os.path.isdir(self.get_agent_dir()):
shutil.rmtree(self.get_agent_dir())
zipfile.ZipFile(self.get_agent_pkg_path()).extractall(self.get_agent_dir())
except Exception as e: # pylint: disable=C0103
fileutil.clean_ioerror(e,
paths=[self.get_agent_dir(), self.get_agent_pkg_path()])
msg = u"Exception unpacking Agent {0} from {1}: {2}".format(
self.name,
self.get_agent_pkg_path(),
ustr(e))
raise UpdateError(msg)
if not os.path.isdir(self.get_agent_dir()):
msg = u"Unpacking Agent {0} failed to create directory {1}".format(
self.name,
self.get_agent_dir())
raise UpdateError(msg)
logger.verbose(
u"Agent {0} unpacked successfully to {1}",
self.name,
self.get_agent_dir())
return
class GuestAgentError(object):
def __init__(self, path): # pylint: disable=R1711
if path is None:
raise UpdateError(u"GuestAgentError requires a path")
self.path = path
self.clear()
return
def mark_failure(self, is_fatal=False): # pylint: disable=R1711
self.last_failure = time.time() # pylint: disable=W0201
self.failure_count += 1
self.was_fatal = is_fatal # pylint: disable=W0201
return
def clear(self): # pylint: disable=R1711
self.last_failure = 0.0
self.failure_count = 0
self.was_fatal = False
return
@property
def is_blacklisted(self):
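        # Blacklisted after a fatal failure or once failure_count reaches MAX_FAILURE.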
return self.was_fatal or self.failure_count >= MAX_FAILURE
def load(self): # pylint: disable=R1711
if self.path is not None and os.path.isfile(self.path):
with open(self.path, 'r') as f: # pylint: disable=C0103
self.from_json(json.load(f))
return
def save(self): # pylint: disable=R1711
if os.path.isdir(os.path.dirname(self.path)):
with open(self.path, 'w') as f: # pylint: disable=C0103
json.dump(self.to_json(), f)
return
def from_json(self, data): # pylint: disable=R1711
self.last_failure = max( # pylint: disable=W0201
self.last_failure,
data.get(u"last_failure", 0.0))
self.failure_count = max( # pylint: disable=W0201
self.failure_count,
data.get(u"failure_count", 0))
self.was_fatal = self.was_fatal or data.get(u"was_fatal", False) # pylint: disable=W0201
return
def to_json(self):
data = {
u"last_failure": self.last_failure,
u"failure_count": self.failure_count,
u"was_fatal": self.was_fatal
}
return data
def __str__(self):
return "Last Failure: {0}, Total Failures: {1}, Fatal: {2}".format(
self.last_failure,
self.failure_count,
self.was_fatal)
Experimenter/Experiments/script/timesetgrating.py | chrox/RealTimeElectrophy | 4 | 6631845 | <filename>Experimenter/Experiments/script/timesetgrating.py
# Generate arbitrary onset and offset timing gratings.
#
# Copyright (C) 2010-2011 <NAME>
#
# See LICENSE.TXT that came with this file.
from __future__ import division
from StimControl.LightStim.SweepSeque import TimingSeque
from StimControl.LightStim.LightData import dictattr
from StimControl.LightStim.FrameControl import FrameSweep
from StimControl.LightStim.Grating import TimingSetGrating,RandPhaseTimingSetGrating,OrthOriTimingSetGrating
p_left = dictattr()
p_left.ml = 0.5
p_left.tfreqCycSec = 0.0
p_left.bgbrightness = 0.0
p_left.contrast = 1
p_left.phase0 = 0
p_right = dictattr()
p_right.ml = 0.5
p_right.tfreqCycSec = 0.0
p_right.bgbrightness = 0.0
p_right.contrast = 1
p_right.phase0 = 0
stim_interval = 0.0
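# A positive stim_interval delays the right-eye stimulus onset; a negative
# value delays the left eye instead (see the 'pre' values below).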
pre_left = 0.0 if stim_interval > 0 else abs(stim_interval)
pre_right = 0.0 if stim_interval <= 0 else stim_interval
repeats = 1600
rand_phase = False
orth_eye = 'left'
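# Each stimulation cycle lasts 132 ms and presents the grating for 16 ms;
# 'pre' shifts stimulus onset within the cycle for each eye.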
cycle_left = dictattr(duration=0.132, pre=pre_left, stimulus=0.016)
cycle_right = dictattr(duration=0.132, pre=pre_right, stimulus=0.016)
block_left = dictattr(repeat=repeats, cycle=cycle_left, interval=0.0)
block_right = dictattr(repeat=repeats, cycle=cycle_right, interval=0.0)
sequence_left = TimingSeque(repeat=1, block=block_left, shuffle=True)
sequence_right = TimingSeque(repeat=1, block=block_right, shuffle=True)
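# Pick the grating class for each eye: plain timing gratings, random-phase
# gratings, or an orthogonal-orientation grating for the eye named by orth_eye.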
if rand_phase:
if orth_eye is None:
grating_left = RandPhaseTimingSetGrating(viewport='left', params=p_left, sweepseq=sequence_left)
grating_right = RandPhaseTimingSetGrating(viewport='right', params=p_right, sweepseq=sequence_right)
elif orth_eye == 'left':
grating_left = OrthOriTimingSetGrating(viewport='left', params=p_left, sweepseq=sequence_left)
grating_right = RandPhaseTimingSetGrating(viewport='right', params=p_right, sweepseq=sequence_right)
elif orth_eye == 'right':
grating_left = RandPhaseTimingSetGrating(viewport='left', params=p_left, sweepseq=sequence_left)
grating_right = OrthOriTimingSetGrating(viewport='right', params=p_right, sweepseq=sequence_right)
else:
grating_left = TimingSetGrating(viewport='left', params=p_left, sweepseq=sequence_left)
grating_right = TimingSetGrating(viewport='right', params=p_right, sweepseq=sequence_right)
sweep = FrameSweep()
sweep.add_stimulus(grating_left)
sweep.add_stimulus(grating_right)
sweep.go(prestim=5.0,poststim=5.0,RSTART=True)
boardStateDriver/boardFunc/resetBoard.py | terpyPy/ButtonBox | 0 | 6631846 | def resetBoard(theBoard):
    # work on the board in place (newBoard aliases theBoard) and return it
newBoard = theBoard
for boardIndex, val in enumerate(newBoard): # enumerate gets the index and the value for each iteration of loop
# if the value of boardIndex is not 0 turn it off
if val != 0:
newBoard[boardIndex] = 0
        # else don't reset the value, skip it
else:
pass
# return the Board modified copy
return newBoard
if __name__ == "__main__":
i = [1] * 16
print('script run as __main__ test block')
print('all on array: ', i)
resetBoard(i)
    print('after reset test: ', i)
Self-Consistent-Field/SOUHF.py | andyj10224/psi4numpy | 214 | 6631847 | <filename>Self-Consistent-Field/SOUHF.py
"""
Unrestricted open-shell Hartree-Fock using direct second-order
convergence acceleration.
References:
- UHF equations & algorithm from [Szabo:1996]
- SO equations & algorithm from [Helgaker:2000]
"""
__authors__ = "<NAME>"
__credits__ = ["<NAME>"]
__copyright__ = "(c) 2014-2018, The Psi4NumPy Developers"
__license__ = "BSD-3-Clause"
__date__ = "2017-9-30"
import time
import numpy as np
np.set_printoptions(precision=5, linewidth=200, suppress=True)
import psi4
# Set Psi4 memory and output options
psi4.set_memory('2 GB')
psi4.core.set_output_file('output.dat', False)
# Triplet oxygen
mol = psi4.geometry("""
0 3
O
O 1 1.2
symmetry c1
""")
psi4.set_options({'basis': 'aug-cc-pvdz',
'reference': 'uhf'})
# Set defaults
maxiter = 10
E_conv = 1.0E-13
D_conv = 1.0E-13
# Integral generation from Psi4's MintsHelper
wfn = psi4.core.Wavefunction.build(mol, psi4.core.get_global_option('BASIS'))
mints = psi4.core.MintsHelper(wfn.basisset())
S = np.asarray(mints.ao_overlap())
V = np.asarray(mints.ao_potential())
T = np.asarray(mints.ao_kinetic())
# Occupations
nbf = wfn.nso()
nalpha = wfn.nalpha()
nbeta = wfn.nbeta()
if nbf > 100:
raise Exception("This has a N^4 memory overhead, killing if nbf > 100.")
H = T + V
# Orthogonalizer A = S^(-1/2)
A = mints.ao_overlap()
A.power(-0.5, 1.e-16)
A = np.asarray(A)
# ERI's
I = np.asarray(mints.ao_eri())
# Steal a good starting guess
psi4.set_options({'e_convergence': 1e-4,
'd_convergence': 1e-4,
'maxiter': 7,
'guess': 'sad'})
scf_e, wfn = psi4.energy('SCF', return_wfn=True)
Ca = np.array(wfn.Ca())
Da = np.array(wfn.Da())
Cb = np.array(wfn.Cb())
Db = np.array(wfn.Db())
nalpha = wfn.nalpha()
nbeta = wfn.nbeta()
t = time.time()
E = 0.0
Enuc = mol.nuclear_repulsion_energy()
Eold = 0.0
print('\nTotal time taken for setup: %.3f seconds' % (time.time() - t))
print('\nStart SCF iterations:\n')
t = time.time()
def transform(I, C1, C2, C3, C4):
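    # Quarter-transform the AO two-electron integrals to the MO basis one index
    # at a time; C1-C4 select the target orbitals for each index.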
#MO = np.einsum('pA,pqrs->Aqrs', C1, I)
nao = I.shape[0]
MO = np.dot(C1.T, I.reshape(nao, -1)).reshape(C1.shape[1], nao, nao, nao)
MO = np.einsum('qB,Aqrs->ABrs', C2, MO)
MO = np.einsum('rC,ABrs->ABCs', C3, MO)
MO = np.einsum('sD,ABCs->ABCD', C4, MO)
return MO
# Rotate orbs and produce C and D matrices
def rotate_orbs(C, x, nocc):
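    # Embed the occupied-virtual step x in an antisymmetric rotation generator U.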
U = np.zeros_like(C)
U[:nocc, nocc:] = x
U[nocc:, :nocc] = -x.T
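    # Approximate exp(U) by its second-order Taylor expansion, then restore
    # orthonormality with a QR decomposition.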
expU = U.copy()
expU[np.diag_indices_from(U)] += 1
expU += 0.5 * np.dot(U, U)
expU, r = np.linalg.qr(expU.T)
Cn = C.dot(expU)
D = np.dot(Cn[:,:nocc], Cn[:,:nocc].T)
return (Cn, D)
for SCF_ITER in range(1, maxiter + 1):
# Build the alpha & beta Fock matrices
Ja = np.einsum('pqrs,rs->pq', I, Da)
Ka = np.einsum('prqs,rs->pq', I, Da)
Jb = np.einsum('pqrs,rs->pq', I, Db)
Kb = np.einsum('prqs,rs->pq', I, Db)
Fa = H + (Ja + Jb) - Ka
Fb = H + (Ja + Jb) - Kb
# dRMS error
diisa_e = A.dot(Fa.dot(Da).dot(S) - S.dot(Da).dot(Fa)).dot(A)
diisb_e = A.dot(Fb.dot(Db).dot(S) - S.dot(Db).dot(Fb)).dot(A)
# SCF energy and update: [Szabo:1996], exercise 3.40, pp. 215
SCF_E = np.einsum('pq,pq->', Da + Db, H)
SCF_E += np.einsum('pq,pq->', Da, Fa)
SCF_E += np.einsum('pq,pq->', Db, Fb)
SCF_E *= 0.5
SCF_E += Enuc
dRMS = 0.5 * (np.mean(diisa_e**2)**0.5 + np.mean(diisb_e**2)**0.5)
print('SCF Iteration %3d: Energy = %4.16f dE = % 1.5E dRMS = %1.5E'
% (SCF_ITER, SCF_E, (SCF_E - Eold), dRMS))
if (abs(SCF_E - Eold) < E_conv) and (dRMS < D_conv):
break
Eold = SCF_E
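    # Split the MO coefficients into occupied and virtual blocks for each spin.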
Cocca = Ca[:, :nalpha]
Cvira = Ca[:, nalpha:]
Coccb = Cb[:, :nbeta]
Cvirb = Cb[:, nbeta:]
# Form gradients from MO Fock matrices: [Helgaker:2000] Eqn. 10.8.34, pp. 484
moFa = (Ca.T).dot(Fa).dot(Ca)
moFb = (Cb.T).dot(Fb).dot(Cb)
grada = -4 * moFa[:nalpha, nalpha:]
gradb = -4 * moFb[:nbeta, nbeta:]
# Form off diagonal contributions to Hessian
Jab = 8 * transform(I, Cocca, Cvira, Coccb, Cvirb)
# Form diagonal alpha contributions
MOaa = transform(I, Cocca, Ca, Ca, Ca)
Ha = np.einsum('ab,ij->iajb', moFa[nalpha:, nalpha:], np.diag(np.ones(nalpha)))
Ha -= np.einsum('ij,ab->iajb', moFa[:nalpha:, :nalpha], np.diag(np.ones(nbf-nalpha)))
Ha += 2 * MOaa[:, nalpha:, :nalpha, nalpha:]
Ha -= MOaa[:, nalpha:, :nalpha, nalpha:].swapaxes(0, 2)
Ha -= MOaa[:, :nalpha, nalpha:, nalpha:].swapaxes(1, 2)
Ha *= 4
# Form diagonal beta contributions
MObb = transform(I, Coccb, Cb, Cb, Cb)
Hb = np.einsum('ab,ij->iajb', moFb[nbeta:, nbeta:], np.diag(np.ones(nbeta)))
Hb -= np.einsum('ij,ab->iajb', moFb[:nbeta:, :nbeta], np.diag(np.ones(nbf-nbeta)))
Hb += 2 * MObb[:, nbeta:, :nbeta, nbeta:]
Hb -= MObb[:, nbeta:, :nbeta, nbeta:].swapaxes(0, 2)
Hb -= MObb[:, :nbeta, nbeta:, nbeta:].swapaxes(1, 2)
Hb *= 4
# Build the full Hessian matrix
na = Ha.shape[0] * Ha.shape[1]
nb = Hb.shape[0] * Hb.shape[1]
ntot = na + nb
# aa | ab
# -------
# ba | bb
Hess = np.zeros((ntot, ntot))
Hess[:na,:na] = Ha.reshape(na, na)
Hess[:na,na:] = Jab.reshape(na,nb)
Hess[na:,:na] = Jab.reshape(na,nb).T
Hess[na:,na:] = Hb.reshape(nb, nb)
# Invert hessian and obtain new vectors
Hinv = np.linalg.inv(Hess)
gradvec = np.hstack((grada.reshape(-1), gradb.reshape(-1)))
resultx = np.einsum('ij,j->i', Hinv, gradvec)
xa = resultx[:na].reshape(Ha.shape[0], Ha.shape[1])
xb = resultx[na:].reshape(Hb.shape[0], Hb.shape[1])
# Rotate the orbitals
Ca, Da = rotate_orbs(Ca, xa, nalpha)
Cb, Db = rotate_orbs(Cb, xb, nbeta)
if SCF_ITER == maxiter:
        psi4.core.clean()  # clear Psi4 scratch files before aborting
raise Exception("Maximum number of SCF cycles exceeded.")
print('Total time for SCF iterations: %.3f seconds \n' % (time.time() - t))
spin_mat = (Cb[:, :nbeta].T).dot(S).dot(Ca[:, :nalpha])
spin_contam = min(nalpha, nbeta) - np.vdot(spin_mat, spin_mat)
print('Spin Contamination Metric: %1.5E\n' % spin_contam)
print('Final SCF energy: %.8f hartree' % SCF_E)
# Compare to Psi4
psi4.set_options({'e_convergence': 1e-8,
'r_convergence': 1e-8,
'scf_type': 'pk',
'maxiter': 100})
SCF_E_psi = psi4.energy('SCF')
psi4.compare_values(SCF_E_psi, SCF_E, 6, 'SCF Energy')
| en | 0.650744 | Unrestricted open-shell Hartree-Fock using direct second-order convergence acceleration. References: - UHF equations & algorithm from [Szabo:1996] - SO equations & algorithm from [Helgaker:2000] # Set Psi4 memory and output options # Triplet oxygen 0 3 O O 1 1.2 symmetry c1 # Set defaults # Integral generation from Psi4's MintsHelper # Occupations # Orthogonalizer A = S^(-1/2) # ERI's # Steal a good starting guess #MO = np.einsum('pA,pqrs->Aqrs', C1, I) # Rotate orbs and produce C and D matrices # Build the alpha & beta Fock matrices # dRMS error # SCF energy and update: [Szabo:1996], exercise 3.40, pp. 215 # Form gradients from MO Fock matrices: [Helgaker:2000] Eqn. 10.8.34, pp. 484 # Form off diagonal contributions to Hessian # Form diagonal alpha contributions # Form diagonal beta contributions # Build the full Hessian matrix # aa | ab # ------- # ba | bb # Invert hessian and obtain new vectors # Rotate the orbitals # Compare to Psi4 | 1.980783 | 2 |
tests/test_storages.py | climbus/chkapi | 0 | 6631848 | <filename>tests/test_storages.py
import os
import tempfile
import pytest
from chkapi.storages import STORAGE_FILE_NAME, TempFileStorage
@pytest.fixture(autouse=True)
def set_tmpdir(tmp_path):
tempfile.tempdir = None
os.environ["TMPDIR"] = str(tmp_path)
@pytest.mark.asyncio
async def test_save_url(tmp_path):
url = "http://localhost/\n"
storage = TempFileStorage()
await storage.save(url)
with open(tmp_path / STORAGE_FILE_NAME) as fp:
assert fp.readline() == url
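# The storage is expected to keep each URL only once, with the most recently saved URL first.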
@pytest.mark.asyncio
async def test_saves_only_unique_urls(tmp_path):
url1 = "http://localhost/"
url2 = "http://127.0.0.1/"
storage = TempFileStorage()
await storage.save(url1)
await storage.save(url1)
await storage.save(url2)
with open(tmp_path / STORAGE_FILE_NAME) as fp:
assert fp.read() == f"{url2}\n{url1}"
@pytest.mark.asyncio
async def test_search_in_file(tmp_path):
with open(tmp_path / STORAGE_FILE_NAME, "w") as fp:
fp.write("http://localhost/\nhttp://127.0.0.1")
storage = TempFileStorage()
result = await storage.find("local")
assert result == ["http://localhost/"]
| <filename>tests/test_storages.py
import os
import tempfile
import pytest
from chkapi.storages import STORAGE_FILE_NAME, TempFileStorage
@pytest.fixture(autouse=True)
def set_tmpdir(tmp_path):
tempfile.tempdir = None
os.environ["TMPDIR"] = str(tmp_path)
@pytest.mark.asyncio
async def test_save_url(tmp_path):
url = "http://localhost/\n"
storage = TempFileStorage()
await storage.save(url)
with open(tmp_path / STORAGE_FILE_NAME) as fp:
assert fp.readline() == url
@pytest.mark.asyncio
async def test_saves_only_unique_urls(tmp_path):
url1 = "http://localhost/"
url2 = "http://127.0.0.1/"
storage = TempFileStorage()
await storage.save(url1)
await storage.save(url1)
await storage.save(url2)
with open(tmp_path / STORAGE_FILE_NAME) as fp:
assert fp.read() == f"{url2}\n{url1}"
@pytest.mark.asyncio
async def test_search_in_file(tmp_path):
with open(tmp_path / STORAGE_FILE_NAME, "w") as fp:
fp.write("http://localhost/\nhttp://127.0.0.1")
storage = TempFileStorage()
result = await storage.find("local")
assert result == ["http://localhost/"]
| none | 1 | 2.401153 | 2 |
|
certbot-nginx/certbot_nginx/tests/tls_sni_01_test.py | tsrivishnu/certbot | 1 | 6631849 | <filename>certbot-nginx/certbot_nginx/tests/tls_sni_01_test.py
"""Tests for certbot_nginx.tls_sni_01"""
import unittest
import shutil
import mock
import six
from acme import challenges
from certbot import achallenges
from certbot import errors
from certbot.plugins import common_test
from certbot.tests import acme_util
from certbot_nginx import obj
from certbot_nginx.tests import util
class TlsSniPerformTest(util.NginxTest):
"""Test the NginxTlsSni01 challenge."""
account_key = common_test.AUTH_KEY
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(token=b"<PASSWORD>"), "pending"),
domain="www.example.com", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(
token=b"\<KEY>"
b"\x80\xe2_X\t\xe7\xc7\xa4\t\xca\xf7&\x945"
), "pending"),
domain="another.alias", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(
token=b"\<KEY>"
b"\xeb9\xf1\xf5\xb9\xefVM\xc9w\xa4u\x9c\xe1\x87\xb4"
), "pending"),
domain="www.example.org", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(token=b"kNd<PASSWORD>0I_A8DXt9Msmg"), "pending"),
domain="sslon.com", account_key=account_key),
]
def setUp(self):
super(TlsSniPerformTest, self).setUp()
config = util.get_nginx_configurator(
self.config_path, self.config_dir, self.work_dir, self.logs_dir)
from certbot_nginx import tls_sni_01
self.sni = tls_sni_01.NginxTlsSni01(config)
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
@mock.patch("certbot_nginx.configurator"
".NginxConfigurator.choose_vhosts")
def test_perform(self, mock_choose):
self.sni.add_chall(self.achalls[1])
mock_choose.return_value = []
result = self.sni.perform()
self.assertFalse(result is None)
def test_perform0(self):
responses = self.sni.perform()
self.assertEqual([], responses)
@mock.patch("certbot_nginx.configurator.NginxConfigurator.save")
def test_perform1(self, mock_save):
self.sni.add_chall(self.achalls[0])
response = self.achalls[0].response(self.account_key)
mock_setup_cert = mock.MagicMock(return_value=response)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
responses = self.sni.perform()
mock_setup_cert.assert_called_once_with(self.achalls[0])
self.assertEqual([response], responses)
self.assertEqual(mock_save.call_count, 1)
# Make sure challenge config is included in main config
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(
util.contains_at_depth(http, ['include', self.sni.challenge_conf], 1))
def test_perform2(self):
acme_responses = []
for achall in self.achalls:
self.sni.add_chall(achall)
acme_responses.append(achall.response(self.account_key))
mock_setup_cert = mock.MagicMock(side_effect=acme_responses)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
sni_responses = self.sni.perform()
self.assertEqual(mock_setup_cert.call_count, 4)
for index, achall in enumerate(self.achalls):
self.assertEqual(
mock_setup_cert.call_args_list[index], mock.call(achall))
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(['include', self.sni.challenge_conf] in http[1])
self.assertFalse(
util.contains_at_depth(http, ['server_name', 'another.alias'], 3))
self.assertEqual(len(sni_responses), 4)
for i in six.moves.range(4):
self.assertEqual(sni_responses[i], acme_responses[i])
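    # _mod_config should write one challenge vhost per address set into the challenge
    # config, each named after its challenge's SNI (z) domain; the second address set is
    # expected back with its flags normalized (v_addr2_print rather than v_addr2).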
def test_mod_config(self):
self.sni.add_chall(self.achalls[0])
self.sni.add_chall(self.achalls[2])
v_addr1 = [obj.Addr("172.16.31.10", "9000", True, False, False, False),
obj.Addr("127.0.0.1", "", False, False, False, False)]
v_addr2 = [obj.Addr("myhost", "", False, True, False, False)]
v_addr2_print = [obj.Addr("myhost", "", False, False, False, False)]
ll_addr = [v_addr1, v_addr2]
self.sni._mod_config(ll_addr) # pylint: disable=protected-access
self.sni.configurator.save()
self.sni.configurator.parser.load()
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(['include', self.sni.challenge_conf] in http[1])
vhosts = self.sni.configurator.parser.get_vhosts()
vhs = [vh for vh in vhosts if vh.filep == self.sni.challenge_conf]
for vhost in vhs:
if vhost.addrs == set(v_addr1):
response = self.achalls[0].response(self.account_key)
else:
response = self.achalls[2].response(self.account_key)
self.assertEqual(vhost.addrs, set(v_addr2_print))
self.assertEqual(vhost.names, set([response.z_domain.decode('ascii')]))
self.assertEqual(len(vhs), 2)
def test_mod_config_fail(self):
root = self.sni.configurator.parser.config_root
self.sni.configurator.parser.parsed[root] = [['include', 'foo.conf']]
# pylint: disable=protected-access
self.assertRaises(
errors.MisconfigurationError, self.sni._mod_config, [])
if __name__ == "__main__":
unittest.main() # pragma: no cover
| <filename>certbot-nginx/certbot_nginx/tests/tls_sni_01_test.py
"""Tests for certbot_nginx.tls_sni_01"""
import unittest
import shutil
import mock
import six
from acme import challenges
from certbot import achallenges
from certbot import errors
from certbot.plugins import common_test
from certbot.tests import acme_util
from certbot_nginx import obj
from certbot_nginx.tests import util
class TlsSniPerformTest(util.NginxTest):
"""Test the NginxTlsSni01 challenge."""
account_key = common_test.AUTH_KEY
achalls = [
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(token=b"<PASSWORD>"), "pending"),
domain="www.example.com", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(
token=b"\<KEY>"
b"\x80\xe2_X\t\xe7\xc7\xa4\t\xca\xf7&\x945"
), "pending"),
domain="another.alias", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(
token=b"\<KEY>"
b"\xeb9\xf1\xf5\xb9\xefVM\xc9w\xa4u\x9c\xe1\x87\xb4"
), "pending"),
domain="www.example.org", account_key=account_key),
achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.chall_to_challb(
challenges.TLSSNI01(token=b"kNd<PASSWORD>0I_A8DXt9Msmg"), "pending"),
domain="sslon.com", account_key=account_key),
]
def setUp(self):
super(TlsSniPerformTest, self).setUp()
config = util.get_nginx_configurator(
self.config_path, self.config_dir, self.work_dir, self.logs_dir)
from certbot_nginx import tls_sni_01
self.sni = tls_sni_01.NginxTlsSni01(config)
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.config_dir)
shutil.rmtree(self.work_dir)
@mock.patch("certbot_nginx.configurator"
".NginxConfigurator.choose_vhosts")
def test_perform(self, mock_choose):
self.sni.add_chall(self.achalls[1])
mock_choose.return_value = []
result = self.sni.perform()
self.assertFalse(result is None)
def test_perform0(self):
responses = self.sni.perform()
self.assertEqual([], responses)
@mock.patch("certbot_nginx.configurator.NginxConfigurator.save")
def test_perform1(self, mock_save):
self.sni.add_chall(self.achalls[0])
response = self.achalls[0].response(self.account_key)
mock_setup_cert = mock.MagicMock(return_value=response)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
responses = self.sni.perform()
mock_setup_cert.assert_called_once_with(self.achalls[0])
self.assertEqual([response], responses)
self.assertEqual(mock_save.call_count, 1)
# Make sure challenge config is included in main config
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(
util.contains_at_depth(http, ['include', self.sni.challenge_conf], 1))
def test_perform2(self):
acme_responses = []
for achall in self.achalls:
self.sni.add_chall(achall)
acme_responses.append(achall.response(self.account_key))
mock_setup_cert = mock.MagicMock(side_effect=acme_responses)
# pylint: disable=protected-access
self.sni._setup_challenge_cert = mock_setup_cert
sni_responses = self.sni.perform()
self.assertEqual(mock_setup_cert.call_count, 4)
for index, achall in enumerate(self.achalls):
self.assertEqual(
mock_setup_cert.call_args_list[index], mock.call(achall))
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(['include', self.sni.challenge_conf] in http[1])
self.assertFalse(
util.contains_at_depth(http, ['server_name', 'another.alias'], 3))
self.assertEqual(len(sni_responses), 4)
for i in six.moves.range(4):
self.assertEqual(sni_responses[i], acme_responses[i])
def test_mod_config(self):
self.sni.add_chall(self.achalls[0])
self.sni.add_chall(self.achalls[2])
v_addr1 = [obj.Addr("172.16.31.10", "9000", True, False, False, False),
obj.Addr("127.0.0.1", "", False, False, False, False)]
v_addr2 = [obj.Addr("myhost", "", False, True, False, False)]
v_addr2_print = [obj.Addr("myhost", "", False, False, False, False)]
ll_addr = [v_addr1, v_addr2]
self.sni._mod_config(ll_addr) # pylint: disable=protected-access
self.sni.configurator.save()
self.sni.configurator.parser.load()
http = self.sni.configurator.parser.parsed[
self.sni.configurator.parser.config_root][-1]
self.assertTrue(['include', self.sni.challenge_conf] in http[1])
vhosts = self.sni.configurator.parser.get_vhosts()
vhs = [vh for vh in vhosts if vh.filep == self.sni.challenge_conf]
for vhost in vhs:
if vhost.addrs == set(v_addr1):
response = self.achalls[0].response(self.account_key)
else:
response = self.achalls[2].response(self.account_key)
self.assertEqual(vhost.addrs, set(v_addr2_print))
self.assertEqual(vhost.names, set([response.z_domain.decode('ascii')]))
self.assertEqual(len(vhs), 2)
def test_mod_config_fail(self):
root = self.sni.configurator.parser.config_root
self.sni.configurator.parser.parsed[root] = [['include', 'foo.conf']]
# pylint: disable=protected-access
self.assertRaises(
errors.MisconfigurationError, self.sni._mod_config, [])
if __name__ == "__main__":
unittest.main() # pragma: no cover
| en | 0.512874 | Tests for certbot_nginx.tls_sni_01 Test the NginxTlsSni01 challenge. # pylint: disable=protected-access # Make sure challenge config is included in main config # pylint: disable=protected-access # pylint: disable=protected-access # pylint: disable=protected-access # pragma: no cover | 2.248626 | 2 |
Dynamic_Programming/Decibinary_Numbers.py | NikolayVaklinov10/Interview_Preparation_Kit | 0 | 6631850 | <gh_stars>0
import os
from collections import defaultdict
from bisect import bisect_right
MAX_NUMBERS = 285113
MAX_ITER = 10
MAX_DIGIT = 19
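# Overall approach (sketch): precompute, for numerals of increasing digit length, how many
# decibinary numerals evaluate to each decimal value; keep cumulative totals for ranking;
# then answer each query by locating the decimal value with a binary search and rebuilding
# the requested numeral digit by digit.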
def get_dec(x):
    """Decimal value of the decibinary numeral x: digit i (from the right) contributes digit * 2**i."""
    result, i = 0, 0
    while x > 0:
        result += (x % 10) * 2 ** i
        x, i = x // 10, i + 1
    return result
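# get_matrix extends the per-value counts to numerals one digit longer: the strided
# running sum d[i] += d[i - 2**(j-1)] lets the new leading digit take any allowed value,
# and the mirror symmetry of the counts (d[i] == d[last_el - i]) fills the upper half;
# cumulative totals are appended to `positions` along the way.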
def get_matrix(d, j, positions):
i_for_sum, max_num = 2 ** j, 5 ** (j - 1)
max_num_last_index, direction = None, 1
last_el = get_dec(10 ** j - 1)
shift = 2 ** (j - 1)
for i in range(shift, len(d)):
if direction > 0:
d[i] += d[i - shift]
if d[i] == max_num:
direction, max_num_last_index = 0, last_el - i
elif direction == 0:
d[i] = max_num
if i == max_num_last_index:
direction = -1
else:
d[i] = d[last_el - i]
if i < i_for_sum:
positions.append(positions[- 1] + d[i])
return d, positions
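# magic_func rebuilds the digits of the requested representation most-significant first:
# for the digit with weight 2**(result_max_length - 1) it counts, via the dcts table, how
# many completions each candidate digit 0-9 admits, skips whole blocks that rank below the
# requested offset, and recurses on the first digit whose block contains it.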
def magic_func(result_in_dec, inner_shift, is_first, dcts, result_max_length, result):
if (result_max_length <= 0):
return result
double_shift, sum_bits = 1 << (result_max_length - 1), 0
for i in range(0, MAX_ITER):
if i * double_shift <= result_in_dec and (sum_bits + dcts[result_max_length - 1][result_in_dec - i * double_shift] <= inner_shift):
sum_bits += dcts[result_max_length - 1][result_in_dec - i * double_shift]
else:
if not is_first or i > 0:
result += str(i)
result = magic_func(result_in_dec - i * double_shift, inner_shift - sum_bits, is_first and i == 0, dcts, result_max_length - 1, result)
return result
return result
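# decibinaryNumbers maps the 1-based rank x to a decimal value (bisect_right over the
# cumulative counts) plus an offset within that value, then reconstructs the digits.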
def decibinaryNumbers(x, dct, dcts, positions):
if x <= 1:
return 0
result_in_dec = bisect_right(positions, x) - 1
inner_shift = x - positions[result_in_dec]
result = magic_func(result_in_dec, inner_shift, True, dcts, result_max_length=len(bin(result_in_dec)[2:]), result='')
return result
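# start() precomputes the counting tables (dcts) for numerals of up to MAX_DIGIT digits and
# the cumulative totals (positions) that decibinaryNumbers searches with bisect_right.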
def start():
    dct = [1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 3, 3, 2, 2, 1, 1] + [0] * (MAX_NUMBERS - 28)  # counts for values 0-27 using at most 2 decibinary digits
dcts = [[1] + [0] * (MAX_NUMBERS - 1), [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + [0] * (MAX_NUMBERS - 10), dct]
j, positions = 2, [1, 2, 3, 5, 7]
while j < MAX_DIGIT:
j += 1
dct, positions = get_matrix(dct[:], j, positions)
dcts.append(dct)
return dct, dcts, positions
if __name__ == '__main__':
dct, dcts, positions = start()
fptr = open(os.environ['OUTPUT_PATH'], 'w')
q = int(input())
for q_itr in range(q):
x = int(input())
result = decibinaryNumbers(x, dct, dcts, positions)
fptr.write(str(result) + '\n')
fptr.close()
| import os
from collections import defaultdict
from bisect import bisect_right
MAX_NUMBERS = 285113
MAX_ITER = 10
MAX_DIGIT = 19
def get_dec(x):
result, i = 0, 0
while x > 0:
result += (x % 10) * 2 ** i
x, i = x // 10, i + 1
return result
def get_matrix(d, j, positions):
i_for_sum, max_num = 2 ** j, 5 ** (j - 1)
max_num_last_index, direction = None, 1
last_el = get_dec(10 ** j - 1)
shift = 2 ** (j - 1)
for i in range(shift, len(d)):
if direction > 0:
d[i] += d[i - shift]
if d[i] == max_num:
direction, max_num_last_index = 0, last_el - i
elif direction == 0:
d[i] = max_num
if i == max_num_last_index:
direction = -1
else:
d[i] = d[last_el - i]
if i < i_for_sum:
positions.append(positions[- 1] + d[i])
return d, positions
def magic_func(result_in_dec, inner_shift, is_first, dcts, result_max_length, result):
if (result_max_length <= 0):
return result
double_shift, sum_bits = 1 << (result_max_length - 1), 0
for i in range(0, MAX_ITER):
if i * double_shift <= result_in_dec and (sum_bits + dcts[result_max_length - 1][result_in_dec - i * double_shift] <= inner_shift):
sum_bits += dcts[result_max_length - 1][result_in_dec - i * double_shift]
else:
if not is_first or i > 0:
result += str(i)
result = magic_func(result_in_dec - i * double_shift, inner_shift - sum_bits, is_first and i == 0, dcts, result_max_length - 1, result)
return result
return result
def decibinaryNumbers(x, dct, dcts, positions):
if x <= 1:
return 0
result_in_dec = bisect_right(positions, x) - 1
inner_shift = x - positions[result_in_dec]
result = magic_func(result_in_dec, inner_shift, True, dcts, result_max_length=len(bin(result_in_dec)[2:]), result='')
return result
def start():
dct = [1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 3, 3, 2, 2, 1, 1] + [0] * (MAX_NUMBERS - 28) # 4
dcts = [[1] + [0] * (MAX_NUMBERS - 1), [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + [0] * (MAX_NUMBERS - 10), dct]
j, positions = 2, [1, 2, 3, 5, 7]
while j < MAX_DIGIT:
j += 1
dct, positions = get_matrix(dct[:], j, positions)
dcts.append(dct)
return dct, dcts, positions
if __name__ == '__main__':
dct, dcts, positions = start()
fptr = open(os.environ['OUTPUT_PATH'], 'w')
q = int(input())
for q_itr in range(q):
x = int(input())
result = decibinaryNumbers(x, dct, dcts, positions)
fptr.write(str(result) + '\n')
fptr.close() | none | 1 | 2.792154 | 3 |