{
"source": "jorgenavarroortiz/multitechnology_testbed_v0",
"score": 2
}
#### File: MPTCP_kernel5.5_WRR05_RPi4/mptcp_ctrl/mptcp_wrr_controller.py
```python
import ipaddress
import subprocess
import socket
import struct
import time
import os
def get_mptcp_telemetry(subflows):
telemetry = []
    ss_filters = []  # renamed from "filter" to avoid shadowing the builtin
    for subflow in subflows:
        ss_filters.append("( src " + subflow["local_ip"] + ":" + str(subflow["local_port"]) + " and dst " + subflow[
            "remote_ip"] + ":" + str(subflow["remote_port"]) + " )")
    filters = " or ".join(ss_filters)
# print("> ss -nite \"" + filters+"\"")
stream = os.popen("ss -nite \"" + filters + "\"")
lines = stream.readlines()
for i in range(1, len(lines), 2):
sample = {}
        lines[i] = lines[i] + lines[i + 1]  # the -e flag makes ss print each socket on two lines; merge them
# print(lines[i].split())
tuples = lines[i].split()
sample["timestamp"]=time.time()
#sample["src"] = tuples[3] # src
#sample["dst"] = tuples[4] # dst
j = 0
while j < len(tuples):
values_ = tuples[j].split(":")
if len(values_) > 1:
if j==3:
src_ip_=values_[0].split("%")
sample["src_ip"]=src_ip_[0]
sample["src_port"]=values_[1]
elif j==4:
sample["dst_ip"]=values_[0]
sample["dst_port"]=values_[1]
elif values_[0] == "rtt":
rtt_ = values_[1].split("/")
sample["rtt"] = float(rtt_[0])
sample["rtt_var"] = float(rtt_[1])
else:
label = values_[0]
value = values_[1]
if values_[0] == "ino":
label = "inode"
value = int(values_[1])
sample[label] = value # TODO change to int/float if needed
else:
if values_[0] == "send":
bps=int(tuples[j + 1][:-3]) # we want to remove the "bps" substring
sample["send_rate"] = bps
# sample["send_"]=tuples[j + 1]
j = j + 1
elif values_[0] == "cubic":
sample["con_alg"] = values_[0]
j = j + 1
telemetry.append(sample)
return telemetry
def proc_net_address_to_host(address):
address_long = int(address, 16)
return socket.inet_ntoa(struct.pack("<I", address_long))
# TODO check little endian?
def proc_net_port_to_host(port):
return int(port, 16)
def get_mptcp_subflows_from_inode(inode, _proc_tcp_path="/proc/net/tcp"):
socket_list = []
with open(_proc_tcp_path, "rt") as file:
lines = file.readlines()
i = 0
# Let's parse the line:
for line in lines:
# First line is a header.
if i > 0:
values = line.split()
# print(values)
_inode = int(values[9])
if _inode == inode:
address = values[1].split(":")
local_address = proc_net_address_to_host(address[0])
local_port = proc_net_port_to_host(address[1])
address = values[2].split(":")
remote_address = proc_net_address_to_host(address[0])
remote_port = proc_net_port_to_host(address[1])
socket_list.append(
{"local_ip": local_address, "local_port": local_port, "remote_ip": remote_address,
"remote_port": remote_port})
i = i + 1
return socket_list
def set_mptcp_scheduler(scheduler="default",_path="net.mptcp.mptcp_scheduler"):
return execute_sysctl_command("-w "+_path+"="+scheduler)
def get_mptcp_current_scheduler():
return execute_sysctl_read_command("net.mptcp.mptcp_scheduler")
def get_mptcp_socket_scheduler(inode):
    scheduler = None
    socket_ = get_mptcp_socket(inode)
    if socket_ is not None:
        scheduler = socket_["scheduler"]
    return scheduler
def get_mptcp_socket(inode):
    found = None  # avoid shadowing the imported socket module
    socket_list = get_mptcp_sockets()
    for socket_ in socket_list:
        if socket_["inode"] == inode:
            found = socket_
    return found
def get_mptcp_sockets(_proc_mptcp_path="/proc/net/mptcp_net/mptcp"):
socket_list = []
with open(_proc_mptcp_path, "rt") as file:
lines = file.readlines()
i = 0
# Let's parse the line:
for line in lines:
# First line is a header.
if i > 0:
values = line.split()
# print(values)
address = values[4].split(":")
local_address = proc_net_address_to_host(address[0])
local_port = proc_net_port_to_host(address[1])
address = values[5].split(":")
remote_address = proc_net_address_to_host(address[0])
remote_port = proc_net_port_to_host(address[1])
socket_list.append({"inode": int(values[9]), "local_ip": local_address, "local_port": local_port,
"remote_ip": remote_address, "remote_port": remote_port, "scheduler": values[10]})
i = i + 1
return socket_list
# TODO: modify so that it returns a result code.
def execute_sysctl_command(params):
return os.system('sysctl ' + params)
#result = subprocess.run('sysctl ' + params)
#return result.returncode
def ip_string_to_unsigned_int(ip):
ip_ = 0
bytes_ = ip.split(".")
if len(bytes_) == 4:
ip_ = socket.htonl((int(bytes_[0]) << 24) + (int(bytes_[1]) << 16) + (int(bytes_[2]) << 8) + int(bytes_[3]))
return ip_
def generate_sysctl_params_string_apiv03(rules):
sysctl_params = ""
for rule in rules:
src_ip = "0"
dst_ip = "0"
src_port = "0"
dst_port = "0"
weight = "0"
if "src_ip" in rule:
src_ip = str(ip_string_to_unsigned_int(rule["src_ip"]))
if "dst_ip" in rule:
dst_ip = str(ip_string_to_unsigned_int(rule["dst_ip"]))
if "src_port" in rule:
src_port = str(socket.htons(rule["src_port"]))
if "dst_port" in rule:
src_port = str(socket.htons(rule["dst_port"]))
if "weight" in rule:
weight = str(rule["weight"])
sysctl_params = sysctl_params + src_ip + " " + dst_ip + " " + weight + " " + src_port + " " + dst_port + " "
sysctl_params = sysctl_params.strip()
return sysctl_params
def generate_sysctl_params_string(ips_weights_dictionary):
sysctl_params = ""
for ip in ips_weights_dictionary:
value = ips_weights_dictionary[ip]
sysctl_params = sysctl_params + str(ip_string_to_unsigned_int(ip)) + " " + str(value) + " "
sysctl_params = sysctl_params.strip()
return sysctl_params
def generate_sysctl_port_params_string(ips_weights_dictionary):
sysctl_params = ""
for ip in ips_weights_dictionary:
value = ips_weights_dictionary[ip]
sysctl_params = sysctl_params + str(ip_string_to_unsigned_int(ip)) + " " + str(socket.htons(value)) + " "
sysctl_params = sysctl_params.strip()
return sysctl_params
def execute_sysctl_read_command(params):
stream = os.popen("sysctl " + params)
return stream.readline()
def set_local_interfaces_rules(rules):
sysctl_params = generate_sysctl_params_string_apiv03(rules)
execute_sysctl_command("-w net.mptcp.mptcp_wrr_li_weights=\"" + sysctl_params + "\"")
def set_local_interfaces_weights(ips_weights_dictionary):
sysctl_params = generate_sysctl_params_string(ips_weights_dictionary)
execute_sysctl_command("-w net.mptcp.mptcp_wrr_li_weights=\"" + sysctl_params + "\"")
def set_remote_interfaces_weights(ips_weights_dictionary):
sysctl_params = generate_sysctl_params_string(ips_weights_dictionary)
execute_sysctl_command("-w net.mptcp.mptcp_wrr_ri_weights=\"" + sysctl_params + "\"")
def set_remote_interfaces_ports(ips_ports_dictionary):
sysctl_params = generate_sysctl_port_params_string(ips_ports_dictionary)
execute_sysctl_command("-w net.mptcp.mptcp_wrr_ri_port=\"" + sysctl_params + "\"")
def set_local_interfaces_ports(ips_ports_dictionary):
sysctl_params = generate_sysctl_port_params_string(ips_ports_dictionary)
execute_sysctl_command("-w net.mptcp.mptcp_wrr_li_port=\"" + sysctl_params + "\"")
def get_remote_interfaces_weights():
return get_sysctl_pair_ip_value("net.mptcp.mptcp_wrr_ri_weights", default_value=1)
def get_srtt_values():
return get_sysctl_pair_ip_value("net.mptcp.mptcp_wrr_srtt", default_value=-1)
def get_cwnd_values():
return get_sysctl_pair_ip_value("net.mptcp.mptcp_wrr_cwnd", default_value=-1)
def get_sysctl_pair_ip_value(sysctl_param, default_value=-1):
values = {}
output = execute_sysctl_read_command(sysctl_param)
# output="net.mptcp.mptcp_wrr_li_weights = 335544330 1 0 0 0 0 0 0"
words = output.split("=")
params = words[1].replace('\t', ' ')
params = params.strip(" \t\n")
params = params.split(' ')
params = list(filter(''.__ne__, params)) # filters all "" occurrences (__ne__ => not equal)
if len(params) < 2:
values = {}
else:
for i in range(0, len(params) - 1, 2):
if params[i] != "0":
value = default_value
if i + 1 < len(params):
value = params[i + 1]
ip = format(ipaddress.IPv4Address(socket.ntohl(int(params[i].strip()))))
values[ip] = int(value)
return values
def get_local_interfaces_rules(sysctl_param="net.mptcp.mptcp_wrr_li_weights", default_value=-1):
output = execute_sysctl_read_command(sysctl_param)
# output="net.mptcp.mptcp_wrr_li_weights = 335544330 1 0 0 0 0 0 0"
words = output.split("=")
params = words[1].replace('\t', ' ')
params = params.strip(" \t\n")
params = params.split(' ')
params = list(filter(''.__ne__, params)) # filters all "" occurrences (__ne__ => not equal)
values = []
if len(params) < 5:
values_ = {}
else:
for i in range(0, len(params) - 1, 5):
values_ = {}
if params[i] != "0":
value = default_value
values_["src_ip"] = format(ipaddress.IPv4Address(socket.ntohl(int(params[i].strip()))))
values_["dst_ip"] = format(ipaddress.IPv4Address(socket.ntohl(int(params[i + 1].strip()))))
values_["weight"] = int(params[i + 2].strip())
values_["src_port"] = socket.ntohs(int(params[i + 3].strip()))
values_["dst_port"] = socket.ntohs(int(params[i + 4].strip()))
values.append(values_)
return values
def get_local_interfaces_weights():
# weights = {}
# output = execute_sysctl_read_command("net.mptcp.mptcp_wrr_li_weights")
# # output="net.mptcp.mptcp_wrr_li_weights = 335544330 1 0 0 0 0 0 0"
#
# words = output.split("=")
#
# params = words[1].replace('\t', ' ')
# params = params.strip(" \t\n")
# params = params.split(' ')
# params = list(filter(''.__ne__, params)) # filters all "" occurrences (__ne__ => not equal)
#
# if len(params) < 2:
# weights = {}
# else:
# for i in range(0, len(params) - 1, 2):
# if params[i] != "0":
# weight = 1
# if i + 1 < len(params):
# weight = params[i + 1]
#
# ip = format(ipaddress.IPv4Address(int(params[i].strip())))
# weights[ip] = weight
#
# return weights
return get_sysctl_pair_ip_value("net.mptcp.mptcp_wrr_li_weights", default_value=1)
```
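As a quick aside, a round-trip sketch of the byte-order packing these helpers rely on (standalone, with the packing logic inlined from `ip_string_to_unsigned_int` above; the IP value is arbitrary):

```python
import socket
import ipaddress

def ip_to_uint(ip):
    # Same packing as ip_string_to_unsigned_int() above.
    b = [int(x) for x in ip.split(".")]
    return socket.htonl((b[0] << 24) + (b[1] << 16) + (b[2] << 8) + b[3])

packed = ip_to_uint("10.0.0.20")
# get_sysctl_pair_ip_value() reverses this with ntohl + IPv4Address:
restored = format(ipaddress.IPv4Address(socket.ntohl(packed)))
assert restored == "10.0.0.20"
```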
#### File: MPTCP_kernel5.5_WRR05/v07/wrr_get_mptcp_metrics_monitor.py
```python
import time
import sys
import mptcp_wrr_controller as wrr
def main():
    # Warning! This must be run in the same network namespace as the MPTCP socket.
    # We get the list of open MPTCP sockets:
mptcp_sockets=wrr.get_mptcp_sockets()
# For each socket
for mptcp_socket in mptcp_sockets:
# We get the identifier of this socket (its inode)
inode=mptcp_socket["inode"]
print("MPTCP socket inode "+str(inode)+" ("+mptcp_socket["scheduler"]+")")
# We can get subflows assigned to a mptcp socket, by its inode:
mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode)
# Now, we can get the telemetry for the flows specified in a list:
telemetry=wrr.get_mptcp_telemetry(mptcp_subflows)
# We can print the telemetry of these subflows:
print("Telemetry collection:")
for sample in telemetry:
print()
# print("Telemetry for subflow "+sample["local_ip"]+":"+str(sample["local_port"])+"->"+sample["remote_ip"]+":"+str(sample["remote_port"])+":")
print("- Average Round Trip Time: "+str(sample["rtt"]))
print("- Round Trip Time Variance: "+str(sample["rtt_var"]))
print("- Minimum RTT: "+str(sample["minrtt"]))
print("- Sent/retransmitted/Delivered bytes: "+str(sample["bytes_sent"])+"/"+str(sample["bytes_retrans"])+"/"+str(sample["bytes_acked"]))
print("- Average sending bitrate: "+str(sample["send_rate"]))
print("- Congestion Window: "+str(sample["cwnd"]))
if __name__ == '__main__':
main()
```
#### File: app/cpe/main.py
```python
import sys
# sys.path.insert(1, '/home/vagrant/vagrant/MPTCP_kernel5.5_WRR05/mptcp_ctrl')
# import mptcp_wrr_controller as wrr
from typing import Optional
from pydantic import BaseModel
from typing import List
from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from fastapi.middleware.cors import CORSMiddleware
# from api_v1.api import api_router
from fastapi.openapi.docs import (
get_redoc_html,
get_swagger_ui_html,
)
from fastapi.staticfiles import StaticFiles
import subprocess
import os
import re
import json
import time
import itertools
from collections.abc import Iterable  # moved out of collections; removed there in Python 3.10
tags_metadata = [
{
"name":"telemetry",
"description": "endpoints for telemetry data"
},
{
"name":"MPTCP",
"description": "endpoints related to MPTCP"
},
{
"name":"services",
"description": "endpoints related to services"
},
{
"name":"OVS",
"description": "endpoints related to OVS"
},
{
"name":"VLAN",
"description": "endpoints related to VLAN"
},
{
"name":"Flow rules",
"description": "endpoints related to flow rules"
},
{
"name":"Utils",
"description": "endpoints related to other tools"
},
]
app = FastAPI(
title="5G-CLARITY Testbed v1 Demo: OVS scenario",
description="Documentation",
version="0.1.0",
docs_url=None,
redoc_url=None,
openapi_tags=tags_metadata
)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.mount("/static", StaticFiles(directory="static"), name="static")
#######################
#
# jjramos 04/0/2021
#
print("Reading configuration file...")
config={}
namespace=None
with open("cpe.cfg") as json_file:
config=json.load(json_file)
if "wrr_library_path" in config:
sys.path.insert(1, config["wrr_library_path"]) # jjramos, 2/7/2021 '/home/mptcp/v06'
import mptcp_wrr_controller as wrr
print("WRR library path... "+config["wrr_library_path"])
if "sockets" in config:
if "namespace" in config["sockets"]:
namespace=config["sockets"]["namespace"]
print("Sockets namespace... "+namespace)
#######################
# app.include_router(api_router)
class IfName(BaseModel):
status: str
list_if: List[dict]
class Rules(BaseModel):
rule: List[dict]
class Config:
schema_extra = {
"example": {
"rule": [{"src_ip":"10.1.1.1", "weight":1},{"src_ip":"10.1.1.2", "weight":1},{"src_ip":"10.1.1.3", "weight":1}]
}
}
class Trunk(BaseModel):
interface: str
vlanid: List[int]
class Config:
schema_extra = {
"example": {
"interface": "eth4",
"vlanid": [100, 200, 300]
}
}
class Tuple(BaseModel):
ip_source: str
ip_destination: str
port_source: Optional[str] = None
port_destination: Optional[str] = None
protocol: Optional[str] = None
proxy: Optional[int] = None
class Config:
schema_extra = {
"example": {
"ip_source": "192.168.127.12",
"ip_destination": "172.16.31.10",
"proxy": 1,
}
}
class Delay(BaseModel):
path: int
delay: Optional[float] = 0
class Config:
schema_extra = {
"example": {
"path": 1,
"delay": 20,
}
}
class Services(BaseModel):
services: List[dict]
class Config:
schema_extra = {
"example":
[{"redundant":{"enabled":True}},{"low-delay":{"enabled":False}},{"weighted-round-robin":{"enabled":True}}]
}
class ServiceSockets(BaseModel):
services: List[dict]
class Config:
schema_extra = {
"example":
[{"inode":12313}]
}
@app.get("/docs", include_in_schema=False)
async def custom_swagger_ui_html():
return get_swagger_ui_html(
openapi_url=app.openapi_url,
title=app.title,
swagger_favicon_url="/static/favicon.ico",
)
@app.get("/redoc", include_in_schema=False)
async def redoc_html():
return get_redoc_html(
openapi_url=app.openapi_url,
title=app.title + " - ReDoc",
redoc_favicon_url="/static/favicon.ico",
)
@app.get("/", include_in_schema=False)
async def root():
response = RedirectResponse(url='/docs')
return response
# return {"message": "go to /docs for the documentation"}
@app.get("/telemetry/if_name", tags=["telemetry"], response_model=IfName)
async def get_interface_name():
# run ip -j link
try:
process = subprocess.run(["ip",'-j',"address"],universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
list_json = json.loads(process.stdout)
    except Exception:
list_json = []
list_ifname = []
idx = 0
for data in list_json:
try:
list_ifname.append({str(idx):{'if_name':data['ifname'],'ip_addr':data['addr_info'][0]['local']}})
idx += 1
        except Exception:
pass
if list_ifname:
return {"status":"success", "list_if":list_ifname}
else:
return {"status":"error", "list_if":list_ifname}
@app.get("/mptcp/scheduler", tags=["MPTCP"])
async def get_scheduler():
    tmp = wrr.get_mptcp_current_scheduler().split('=')[1].strip()
    if tmp:
        return {"status":"success", 'scheduler':tmp}  # reuse the value instead of re-reading sysctl
    else:
        return {"status":"error"}
# @app.post("/mptcp/set_rules", tags=["MPTCP"])
# async def set_rules(rules: Rules):
# try:
# status = wrr.set_local_interfaces_rules(rules.rule)
# return {'status':'success', 'msg': f"The rule is {rules.rule}"}
# except ValueError as e:
# return {'status':'error', 'msg': f"The rule is {rules.rule}", 'error_msg': str(e)}
# @app.get("/mptcp/metrics", tags=["MPTCP"])
# async def get_metrics():
# try:
# mptcp_sockets=wrr.get_mptcp_sockets()
# # For each socket
# for mptcp_socket in mptcp_sockets:
# # We get the identifier of this socket (its inode)
# inode=mptcp_socket["inode"]
# mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode)
# telemetries = []
# for subflow in mptcp_subflows:
# telemetries.append(wrr.get_mptcp_telemetry([subflow]))
# telemetries = list(itertools.chain(*telemetries))
# return {"status":"success", "telemetries":telemetries}
# except Exception as e:
# return {"status":"error", "message": str(e)}
@app.get("/cpe/services", tags=["services"], response_model=Services)
async def get_cpe_services():
services_list=[{"redundant":{"enabled":True}},{"low-delay":{"enabled":True}},{"weighted-round-robin":{"enabled":True}}]
return { "services":services_list}
@app.get("/cpe/services/{service}/sockets/{inode}/scheduler/mptcp/weights", tags=["MPTCP"])
async def cpe_scheduler_mptcp_get_rules(service,inode):
try:
status = wrr.get_local_interfaces_rules()
print(status)
return {'status':'success', 'rules': status}
except ValueError as e:
return {'status':'error'}
@app.post("/cpe/services/{service}/sockets/{inode}/scheduler/mptcp/weights", tags=["MPTCP"])
async def cpe_scheduler_mptcp_set_rules(service, inode, rules: Rules):
try:
status = wrr.set_local_interfaces_rules(rules.rule)
return {'status':'success', 'msg': f"The rule is {rules.rule}"}
except ValueError as e:
return {'status':'error', 'msg': f"The rule is {rules.rule}", 'error_msg': str(e)}
@app.get("/cpe/services/{service}/sockets/{inode}", tags=["services"])
async def get_cpe_service_socket_inode(service,inode):
service_to_scheduler={"weighted-round-robin":"roundrobin","redundant":"redundant","low-delay":"default"}
sched=service_to_scheduler[service]
inode=int(inode)
service_info=[]
try:
# Only for WRR:
if service=="weighted-round-robin":
scheduler=wrr.get_mptcp_socket_scheduler(inode,namespace)
print("-> "+str(inode)+" "+sched)
print(scheduler)
if scheduler==sched:
mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode,namespace)
service_info.append({"inode":inode, "scheduler": sched, "subflows":mptcp_subflows})
else:
service_info.append({"inode":inode, "scheduler": sched})
return {"status":"success", "socket_info":service_info}
    except Exception:
return {"status":"error"}
@app.get("/cpe/services/{service}/sockets", tags=["services"])
async def get_cpe_service_sockets(service):
service_to_scheduler={"weighted-round-robin":"roundrobin","redundant":"redundant","low-delay":"default"}
sched=service_to_scheduler[service]
service_info=[]
try:
# Only for WRR:
if service=="weighted-round-robin":
mptcp_sockets=wrr.get_mptcp_sockets(namespace)
# For each socket
for mptcp_socket in mptcp_sockets:
# We get the identifier of this socket (its inode)
inode=mptcp_socket["inode"]
scheduler=wrr.get_mptcp_socket_scheduler(inode,namespace)
print(scheduler)
if scheduler==sched:
mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode,namespace)
service_info.append({"inode":inode, "scheduler": sched, "subflows":mptcp_subflows})
else:
service_info.append({"inode":inode, "scheduler": sched})
return {"status":"success", "sockets_info":service_info}
    except Exception:
return {"status":"error"}
@app.get("/cpe/telemetry/sockets", tags=["telemetry"])
async def get_cpe_telemetry_sockets():
try:
mptcp_sockets=wrr.get_mptcp_sockets(namespace)
telemetries = []
# For each socket
for mptcp_socket in mptcp_sockets:
# We get the identifier of this socket (its inode)
inode=mptcp_socket["inode"]
mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode,namespace)
for subflow in mptcp_subflows:
telemetries.append(wrr.get_mptcp_telemetry([subflow],namespace))
telemetries = list(itertools.chain(*telemetries))
return {"status":"success", "telemetry":telemetries}
    except Exception:
return {"status":"error"}
@app.get("/cpe/telemetry/sockets/{inode}", tags=["telemetry"])
async def get_cpe_telemetry_socket_inode(inode):
try:
inode=int(inode)
mptcp_subflows=wrr.get_mptcp_subflows_from_inode(inode,namespace)
telemetries = []
for subflow in mptcp_subflows:
telemetries.append(wrr.get_mptcp_telemetry([subflow],namespace))
telemetries = list(itertools.chain(*telemetries))
return {"status":"success", "telemetry":telemetries}
    except Exception:
return {"status":"error"}
@app.get("/ovs/show", tags=["OVS"])
async def get_ovs_status():
try:
process = subprocess.run("ovs-vsctl -f json show".split(" "),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
# parsing
list_text = process.stdout.split('\n')
dict_text = {}
            prev_bridge_name = None
            prev_port_name = None
            status_insert_bridge = False
            status_insert_port = False
for i,txt in enumerate(list_text):
if 'Bridge' in txt:
bridge_name = re.split(r" +",txt)[-1]
dict_text[bridge_name] = {}
prev_bridge_name = bridge_name
status_insert_bridge = True
continue
if 'Port' in txt:
port_name = re.split(r" +",txt)[-1]
dict_text[bridge_name][port_name] = {}
prev_port_name = port_name
status_insert_port = True
continue
tmp = re.match(r"^ *[a-zA-Z]",txt)
if tmp is not None:
                    if tmp.span()[-1]-1 == 12:  # 12 leading spaces: attribute nested under the current Port
key = re.split(r" +",txt)[1].replace(":","")
value = "".join(re.split(r" +",txt)[2:])
dict_text[bridge_name][port_name][key] = value
return {"status":"success", "message":dict_text}
else:
return {"status":"error", "message":"An error occured"}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.get("/ovs/bridge_info/{bridge_name}", tags=["OVS"])
async def get_ovs_bridge_info(bridge_name: str):
try:
process = subprocess.run(f"/home/vagrant/vagrant/OVS/ovs_show_of.sh -b {bridge_name}".split(" "),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
text = process.stdout
list_text = text.split('\n')
list_text.pop()
# parsing
            prev_port_name = None
## get the port names
port_names = []
tmp = re.findall(r".*\d\((.*)\)",text)
if tmp:
port_names.append(tmp)
tmp = re.findall(r".*LOCAL\((.*)\)",text)
if tmp:
port_names.append(tmp)
## flatten
port_names = list(itertools.chain(*port_names))
## initiate dict
dict_text = {}
tmp = re.findall(r".*n_tables:(\d+)",text)
if tmp:
dict_text['n_tables'] = tmp[0]
tmp = re.findall(r".*n_buffers:(\d+)",text)
if tmp:
dict_text['n_buffers'] = tmp[0]
tmp = re.findall(r".*capabilities: (.+)",text)
if tmp:
dict_text['capabilities'] = tmp[0]
tmp = re.findall(r".*actions: (.+)",text)
if tmp:
dict_text['actions'] = tmp[0]
for port_name in port_names:
dict_text[port_name] = {}
## loop
for i,txt in enumerate(list_text):
port_name = [el for el in port_names if isinstance(el, Iterable) and (el in txt)]
if port_name:
port_name = port_name[0]
tmp = re.findall(r".*addr:(.+)",txt)
dict_text[port_name]["addr"] = tmp[0]
prev_port_name = port_name
continue
tmp = re.match(r"^ *[a-zA-Z]",txt)
if tmp is not None:
                    if tmp.span()[-1]-1 == 5:  # 5 leading spaces: continuation line for the current port
key = re.split(r" +",txt)[1].replace(":","")
value = "".join(re.split(r" +",txt)[2:])
dict_text[prev_port_name][key] = value
return {"status":"success", "message":dict_text}
else:
return {"status":"error", "message":"An error occured"}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/vlan/set_vlan", tags=["VLAN"])
async def set_vlan(trunk: Trunk):
try:
text = f"-i {trunk.interface}"
for id in trunk.vlanid:
text = text + " -v " + str(id)
text = "bash /home/vagrant/vagrant/OVS/ovs_add_rule_trunk_port.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {'status':'success', 'msg': text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/vlan/del_vlan", tags=["VLAN"])
async def del_vlan(trunk: Trunk):
try:
text = f"-i {trunk.interface}"
for id in trunk.vlanid:
text = text + " -v " + str(id)
text = "bash /home/vagrant/vagrant/OVS/ovs_remove_rule_trunk_port.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {'status':'success', 'msg': text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/vlan/add_rule_access_port", tags=["VLAN"])
async def add_rule_access_port(trunk: Trunk):
try:
text = f"-i {trunk.interface}"
for id in trunk.vlanid:
text = text + " -v " + str(id)
text = "bash /home/vagrant/vagrant/OVS/ovs_add_rule_access_port.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {'status':'success', 'msg': text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/vlan/remove_rule_access_port", tags=["VLAN"])
async def remove_rule_access_port(trunk: Trunk):
try:
text = f"-i {trunk.interface}"
for id in trunk.vlanid:
text = text + " -v " + str(id)
text = "bash /home/vagrant/vagrant/OVS/ovs_remove_rule_access_port.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {'status':'success', 'msg': text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.get("/flow/show/{bridge_name}", tags=["Flow rules"])
async def show_of_flows(bridge_name: str):
try:
process = subprocess.run(f"/home/vagrant/vagrant/OVS/ovs_show_of_flows.sh -b {bridge_name}".split(" "),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
text = process.stdout
list_split = re.split(r"(\S+)=(\S+)",text)
list_split.pop()
# remove comma
list_split = [txt.replace(',','') for txt in list_split]
dict_text = {}
for i in range(1, int(len(list_split)/3), 1):
dict_text[list_split[3*i+1]] = list_split[3*i+2]
return {"status":"success", "message":dict_text}
else:
return {"status":"error", "message":"An error occured"}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.get("/flow/del_all", tags=["Flow rules"])
async def remove_all_flows():
try:
process = subprocess.run(f"/home/vagrant/vagrant/OVS/cpe_remove_all_flows.sh".split(" "),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":"All flows are removed"}
else:
return {"status":"error", "message":"An error occured"}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/flow/add_tuple_rule", tags=["Flow rules"])
async def add_tuple_rule(tuple: Tuple):
try:
text = ""
if tuple.ip_source:
text = text + f" -s {tuple.ip_source}"
if tuple.ip_destination:
text = text + f" -d {tuple.ip_destination}"
if tuple.port_source:
text = text + f" -S {tuple.port_source}"
if tuple.port_destination:
text = text + f" -D {tuple.port_destination}"
if tuple.protocol:
text = text + f" -p {tuple.protocol}"
if tuple.proxy:
text = text + f" -P {tuple.proxy}"
text = "bash /home/vagrant/vagrant/OVS/cpe_add_tuple_rule.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/flow/remove_tuple_rule", tags=["Flow rules"])
async def remove_tuple_rule(tuple: Tuple):
try:
text = ""
if tuple.ip_source:
text = text + f" -s {tuple.ip_source}"
if tuple.ip_destination:
text = text + f" -d {tuple.ip_destination}"
if tuple.port_source:
text = text + f" -S {tuple.port_source}"
if tuple.port_destination:
text = text + f" -D {tuple.port_destination}"
if tuple.protocol:
text = text + f" -p {tuple.protocol}"
text = "bash /home/vagrant/vagrant/OVS/cpe_remove_tuple_rule.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/flow/configure_client", tags=["Flow rules"])
async def configure_client(tuple: Tuple):
try:
text = ""
if tuple.ip_source:
text = text + f" -s {tuple.ip_source}"
if tuple.ip_destination:
text = text + f" -d {tuple.ip_destination}"
if tuple.port_source:
text = text + f" -S {tuple.port_source}"
if tuple.port_destination:
text = text + f" -D {tuple.port_destination}"
if tuple.protocol:
text = text + f" -p {tuple.protocol}"
if tuple.proxy:
text = text + f" -P {tuple.proxy}"
text = "bash /home/vagrant/vagrant/OVS/cpe_configure_client.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/utils/add_delay_to_path", tags=["Utils"])
async def add_delay_to_path(delay: Delay):
try:
text = ""
if delay.path:
text = text + f" -p {delay.path}"
if delay.delay:
text = text + f" -d {int(delay.delay)}"
text = "bash /home/vagrant/vagrant/OVS/cpe_add_delay_to_path.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
@app.post("/utils/remove_delay_to_path", tags=["Utils"])
async def remove_delay_to_path(delay: Delay):
try:
text = ""
if delay.path:
text = text + f" -p {delay.path}"
text = "bash /home/vagrant/vagrant/OVS/cpe_remove_delay_to_path.sh "+ text;
process = subprocess.run(re.split(r" +",text),universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.returncode == 0:
return {"status":"success", "message":text}
else:
return {"status":"error", "message":"An error occured", "command": text, 'response':process.stderr}
except Exception as e:
return {"status":"error", "message": str(e)}
```
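For orientation, a client-side sketch of exercising the WRR weights endpoint defined above (the host/port and inode are illustrative, borrowed from the schema examples; not part of the original code):

```python
import requests

rules = {"rule": [{"src_ip": "10.1.1.1", "weight": 2}, {"src_ip": "10.1.1.2", "weight": 1}]}
url = "http://localhost:8000/cpe/services/weighted-round-robin/sockets/12313/scheduler/mptcp/weights"
resp = requests.post(url, json=rules)
print(resp.json())  # e.g. {'status': 'success', 'msg': "The rule is [...]"}
```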
{
"source": "jorgenbele/aoc-deltatime",
"score": 3
}
#### File: jorgenbele/aoc-deltatime/main.py
```python
"""Show delta times (time from the part-1 star to the part-2 star) for an
Advent of Code private leaderboard.

NOTE: docopt reads its usage text from this module docstring, which was
missing here; the block below is reconstructed from the options parsed in
main() and may differ from the author's original wording.

Usage:
    main.py [--verbose] [--update] [--ranking] [--total]
            [--year=<year>] [--leaderboard=<id>] [--cookie=<cookie>] [--file=<path>]
"""
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime, timedelta
from docopt import docopt
from rich import print as rprint
from rich.console import Console
from rich.table import Table
from typing import Any, Dict, List, Optional
import json
import os
import requests
import sys
import time
@dataclass
class Flags:
year: int
json_path: str
update: bool
show_ranking: bool
show_total: bool
leaderboard_id: str
cookie: str
@dataclass
class Member:
id: str
name: str
score: Any
stars: Any
days: Any
sum_dt: Optional[int]
avg_dt: Optional[float]
@dataclass
class AOCData:
year: int
members: Dict[str, Member]
days_dt: Dict[str, Any]
last_day: int
def ranked_days_dt(self, day):
return sorted(list(self.days_dt[day].items()), key=lambda x: x[1])
console = Console()
def display_table(header, rows, justify_right=None):
if justify_right == None:
justify_right = []
table = Table(show_header=True, header_style="bold blue")
list(
map(
lambda ih: table.add_column(
ih[1], justify=("right" if ih[0] in justify_right else None)
),
enumerate(header),
)
)
list(map(lambda r: table.add_row(*r), rows))
console.print(table)
def parse_data(d: Dict[Any, Any]) -> AOCData:
year = d["event"]
last_day = 1
members = {}
days_dt: Dict[str, Dict[str, int]] = {}
for member_id, member_data in d["members"].items():
member = Member(
id=member_id,
name=member_data["name"],
score=member_data["local_score"],
stars=member_data["stars"],
days={},
sum_dt=None,
avg_dt=None,
)
sum_dt = 0
count = 0
for day, day_data in member_data["completion_day_level"].items():
last_day = max(last_day, int(day))
delta_time = (
day_data.get("2", {"get_star_ts": 0})["get_star_ts"]
- day_data["1"]["get_star_ts"]
)
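            # Note: when part 2 is unsolved, delta_time is negative (0 minus the
            # part-1 timestamp); it is excluded from sum_dt below but still enters
            # days_dt, so such members sort first in ranked_days_dt.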
if "2" in day_data:
sum_dt += delta_time
count += 1
member.days[day] = {
"delta_time": delta_time,
"first_complete": day_data["1"]["get_star_ts"],
"second_complete": day_data.get("2", {"get_star_ts": 0}),
}
if not day in days_dt:
days_dt[day] = {}
days_dt[day][member_id] = delta_time
if count == 0:
average_dt = -1.0
else:
average_dt = sum_dt / count
member.avg_dt = average_dt
member.sum_dt = sum_dt
members[member_id] = member
return AOCData(year=year, members=members, days_dt=days_dt, last_day=last_day)
def update_if_possible(flags: Flags):
try:
last_modified = os.path.getmtime(flags.json_path)
now = time.time()
diff = now - last_modified
        if diff < 15 * 60:
            rprint("[bold red]Refusing to update, modified time too recent")
            return True  # a sufficiently fresh cached file already exists
except FileNotFoundError:
pass
r = requests.get(
f"https://adventofcode.com/{flags.year}/leaderboard/private/view/{flags.leaderboard_id}.json",
cookies={"session": flags.cookie},
)
if not r.ok:
rprint(f"[bold red]{r}")
return False
data = r.json()
with open(flags.json_path, "w") as f:
json.dump(data, f)
rprint(f"[bold green]Wrote updated score file to {flags.json_path}")
return True
def format_dt(dt: int):
if dt > 60 * 60 * 24:
return ">24h"
s = dt % 60
m = ((dt - s) % (60 * 60)) // 60
h = ((dt - s - m * 60) % (60 * 60 * 60)) // 3600
d = ((dt - s - m * 60 - h * 60) % (60 * 60 * 60 * 24)) // (3600 * 24)
out = ""
if d > 0:
out += f"{d}d"
if h > 0:
out += f"{h}h"
if m > 0:
out += f"{m}m"
if s > 0:
out += f"{s}s"
return out
def display_ranking(data: AOCData):
last_day = max(map(lambda x: int(x), data.days_dt.keys()))
header = ["Name", "Delta time", "Points"]
for day in range(1, last_day + 1):
rows = [
[
data.members[member_id].name,
format_dt(dt),
str(len(data.members) - rank),
]
for rank, (member_id, dt) in enumerate(data.ranked_days_dt(str(day)))
if len(data.members) - rank > 0
]
rprint(f"[bold] Day {day}")
display_table(header, rows, justify_right=[2])
def display_total(data: AOCData):
total_points: Dict[str, int] = defaultdict(lambda: 0)
for day in range(1, data.last_day + 1):
scores = sorted(list(data.days_dt[str(day)].items()), key=lambda x: x[1])
for rank, (member_id, dt) in enumerate(scores):
total_points[member_id] += len(data.members) - rank
id_points_ordered = sorted(list(total_points.items()), key=lambda x: -x[1])
header = ["Name", "Total Points"]
rows = [
[data.members[member_id].name, str(points)]
for member_id, points in id_points_ordered
]
rprint(f"[bold] Total")
display_table(header, rows)
def run(flags: Flags):
if flags.update:
with console.status("[bold green]Fetching data...") as status:
assert update_if_possible(flags), "Unable to fetch data from API"
with open(flags.json_path) as f:
data = json.load(f)
data = parse_data(data)
if flags.show_ranking:
display_ranking(data)
if flags.show_total:
display_total(data)
def main():
args = docopt(__doc__, sys.argv[1:])
verbose = args.get("--verbose", False)
if verbose:
print(args)
year = args.get("--year") or datetime.now().year
update = args.get("--update", False)
leaderboard_id = args.get("--leaderboard") or os.environ.get("AOC_LEADERBOARD_ID")
assert leaderboard_id
cookie = args.get("--cookie") or os.environ.get("AOC_COOKIE")
assert cookie
json_path = args.get("--file") or f"{year}_{leaderboard_id}.json"
show_ranking = args.get("--ranking", False)
show_total = args.get("--total", False)
flags = Flags(
json_path=json_path,
year=year,
show_ranking=show_ranking,
show_total=show_total,
update=update,
cookie=cookie,
leaderboard_id=leaderboard_id,
)
run(flags)
if __name__ == "__main__":
main()
```
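A quick sanity check of `format_dt` (values chosen for illustration):

```python
assert format_dt(3700) == "1h1m40s"           # 1 hour, 1 minute, 40 seconds
assert format_dt(59) == "59s"
assert format_dt(2 * 24 * 60 * 60) == ">24h"  # anything over a day is capped early
```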
{
"source": "jorgenbuilder/python-mnemonic",
"score": 2
}
#### File: jorgenbuilder/python-mnemonic/test_mnemonic.py
```python
import json
import random
import unittest
from mnemonic import Mnemonic
class MnemonicTest(unittest.TestCase):
def _check_list(self, language, vectors):
mnemo = Mnemonic(language)
for v in vectors:
code = mnemo.to_mnemonic(bytes.fromhex(v[0]))
            seed = Mnemonic.to_seed(code, passphrase="TREZOR")  # the reference BIP39 vectors use this passphrase
xprv = Mnemonic.to_hd_master_key(seed)
self.assertIs(mnemo.check(v[1]), True)
self.assertEqual(v[1], code)
self.assertEqual(v[2], seed.hex())
self.assertEqual(v[3], xprv)
def test_vectors(self):
with open("vectors.json", "r") as f:
vectors = json.load(f)
for lang in vectors.keys():
self._check_list(lang, vectors[lang])
def test_failed_checksum(self):
code = (
"bless cloud wheel regular tiny venue bird web grief security dignity zoo"
)
mnemo = Mnemonic("english")
self.assertFalse(mnemo.check(code))
def test_detection(self):
self.assertEqual("english", Mnemonic.detect_language("security"))
with self.assertRaises(Exception):
Mnemonic.detect_language("xxxxxxx")
def test_utf8_nfkd(self):
# The same sentence in various UTF-8 forms
words_nfkd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
words_nfc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfkc = u"P\u0159\xed\u0161ern\u011b \u017elu\u0165ou\u010dk\xfd k\u016f\u0148 \xfap\u011bl \u010f\xe1belsk\xe9 \xf3dy z\xe1ke\u0159n\xfd u\u010de\u0148 b\u011b\u017e\xed pod\xe9l z\xf3ny \xfal\u016f"
words_nfd = u"Pr\u030ci\u0301s\u030cerne\u030c z\u030clut\u030couc\u030cky\u0301 ku\u030an\u030c u\u0301pe\u030cl d\u030ca\u0301belske\u0301 o\u0301dy za\u0301ker\u030cny\u0301 uc\u030cen\u030c be\u030cz\u030ci\u0301 pode\u0301l zo\u0301ny u\u0301lu\u030a"
        passphrase_nfkd = (
            # Reconstructed: NFKD form of the NFC passphrase below.
            u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
        )
passphrase_nfc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
passphrase_nfkc = (
u"Neuv\u011b\u0159iteln\u011b bezpe\u010dn\xe9 hesl\xed\u010dko"
)
        passphrase_nfd = (
            # Reconstructed: NFD form, identical to NFKD for these characters.
            u"Neuve\u030cr\u030citelne\u030c bezpec\u030cne\u0301 hesli\u0301c\u030cko"
        )
seed_nfkd = Mnemonic.to_seed(words_nfkd, passphrase_nfkd)
seed_nfc = Mnemonic.to_seed(words_nfc, passphrase_nfc)
seed_nfkc = Mnemonic.to_seed(words_nfkc, passphrase_nfkc)
seed_nfd = Mnemonic.to_seed(words_nfd, passphrase_nfd)
self.assertEqual(seed_nfkd, seed_nfc)
self.assertEqual(seed_nfkd, seed_nfkc)
self.assertEqual(seed_nfkd, seed_nfd)
def test_to_entropy(self):
data = [bytes(random.getrandbits(8) for _ in range(32)) for _ in range(1024)]
data.append(b"Lorem ipsum dolor sit amet amet.")
m = Mnemonic("english")
for d in data:
self.assertEqual(m.to_entropy(m.to_mnemonic(d).split()), d)
def test_expand_word(self):
m = Mnemonic("english")
self.assertEqual("", m.expand_word(""))
self.assertEqual(" ", m.expand_word(" "))
self.assertEqual("access", m.expand_word("access")) # word in list
self.assertEqual(
"access", m.expand_word("acce")
) # unique prefix expanded to word in list
self.assertEqual("acb", m.expand_word("acb")) # not found at all
self.assertEqual("acc", m.expand_word("acc")) # multi-prefix match
self.assertEqual("act", m.expand_word("act")) # exact three letter match
self.assertEqual(
"action", m.expand_word("acti")
) # unique prefix expanded to word in list
def test_expand(self):
m = Mnemonic("english")
self.assertEqual("access", m.expand("access"))
self.assertEqual(
"access access acb acc act action", m.expand("access acce acb acc act acti")
)
def __main__():
unittest.main()
if __name__ == "__main__":
__main__()
```
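For orientation, a minimal sketch of the `Mnemonic` API these tests exercise (the all-zero entropy vector is the first entry in the standard BIP39 test vectors):

```python
from mnemonic import Mnemonic

m = Mnemonic("english")
words = m.to_mnemonic(bytes(16))   # 128-bit all-zero entropy
assert m.check(words)              # checksum is valid
seed = Mnemonic.to_seed(words, passphrase="TREZOR")
assert len(seed) == 64             # BIP39 seeds are 512 bits
```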
{
"source": "jorgenDK/tuya-local",
"score": 2
}
#### File: tests/devices/base_device_tests.py
```python
from unittest import IsolatedAsyncioTestCase
from unittest.mock import AsyncMock, patch
from custom_components.tuya_local.generic.climate import TuyaLocalClimate
from custom_components.tuya_local.generic.fan import TuyaLocalFan
from custom_components.tuya_local.generic.humidifier import TuyaLocalHumidifier
from custom_components.tuya_local.generic.light import TuyaLocalLight
from custom_components.tuya_local.generic.lock import TuyaLocalLock
from custom_components.tuya_local.generic.switch import TuyaLocalSwitch
from custom_components.tuya_local.helpers.device_config import (
TuyaDeviceConfig,
possible_matches,
)
DEVICE_TYPES = {
"climate": TuyaLocalClimate,
"fan": TuyaLocalFan,
"humidifier": TuyaLocalHumidifier,
"light": TuyaLocalLight,
"lock": TuyaLocalLock,
"switch": TuyaLocalSwitch,
}
class TuyaDeviceTestCase(IsolatedAsyncioTestCase):
__test__ = False
def setUpForConfig(self, config_file, payload):
"""Perform setup tasks for every test."""
device_patcher = patch("custom_components.tuya_local.device.TuyaLocalDevice")
self.addCleanup(device_patcher.stop)
self.mock_device = device_patcher.start()
self.dps = payload.copy()
self.mock_device.get_property.side_effect = lambda id: self.dps[id]
cfg = TuyaDeviceConfig(config_file)
self.conf_type = cfg.legacy_type
self.mock_device.name = cfg.name
self.entities = {}
self.entities[cfg.primary_entity.entity] = self.create_entity(
cfg.primary_entity
)
self.names = {}
self.names[cfg.primary_entity.entity] = cfg.primary_entity.name
for e in cfg.secondary_entities():
self.entities[e.entity] = self.create_entity(e)
self.names[e.entity] = e.name
def create_entity(self, config):
"""Create an entity to match the config"""
dev_type = DEVICE_TYPES[config.entity]
if dev_type:
return dev_type(self.mock_device, config)
def test_config_matched(self):
for cfg in possible_matches(self.dps):
if cfg.legacy_type == self.conf_type:
self.assertEqual(cfg.match_quality(self.dps), 100.0)
return
self.fail()
def test_should_poll(self):
for e in self.entities.values():
self.assertTrue(e.should_poll)
def test_name_returns_device_name(self):
for e in self.entities.values():
self.assertEqual(e.name, self.mock_device.name)
def test_friendly_name_returns_config_name(self):
for e in self.entities:
self.assertEqual(self.entities[e].friendly_name, self.names[e])
def test_unique_id_returns_device_unique_id(self):
for e in self.entities.values():
self.assertEqual(e.unique_id, self.mock_device.unique_id)
def test_device_info_returns_device_info_from_device(self):
for e in self.entities.values():
self.assertEqual(e.device_info, self.mock_device.device_info)
async def test_update(self):
for e in self.entities.values():
result = AsyncMock()
self.mock_device.async_refresh.return_value = result()
self.mock_device.async_refresh.reset_mock()
await e.async_update()
self.mock_device.async_refresh.assert_called_once()
result.assert_awaited()
```
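A hypothetical concrete test case showing how `TuyaDeviceTestCase` is meant to be subclassed (the config file name and DPS payload are invented placeholders):

```python
class TestPlaceholderSwitch(TuyaDeviceTestCase):
    __test__ = True  # re-enable test collection for the concrete case

    def setUp(self):
        # "placeholder_switch.yaml" and the DPS ids below are illustrative only.
        self.setUpForConfig("placeholder_switch.yaml", {"1": True, "2": 0})
```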
{
"source": "JorgenLiu/hannibal",
"score": 3
}
#### File: hannibal/parser/parser.py
```python
import time
import asyncio
from threading import Thread
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
class BaseParser(object):
_BEFORE_PARSE_MIDDLEWARE_LIST = []
_AFTER_PARSE_MIDDLEWARE_LIST = []
def __init__(self, mission_queue, parse_queue, href_pool):
self.mission_queue = mission_queue
self.parse_queue = parse_queue
self.href_pool = href_pool
self.meta_url = None
self.url_pattern = None
self.parse_loop = asyncio.new_event_loop()
def pre_process(self, response_object):
"""Method for processing the response content into content object,
which could be achieved by RegEx, BeautifulSoup or PyQuery."""
return response_object
    async def parse_page(self, response_content):
        content_object = self.pre_process(response_content)
        # Await the extraction directly; this coroutine already runs on parse_loop.
        # (Wrapping the awaited result in run_coroutine_threadsafe, as before,
        # passes None instead of a coroutine and raises TypeError.)
        await self.extract_data(content_object)
async def extract_data(self, content_object) -> None:
"""extract data from content"""
raise NotImplementedError
@staticmethod
def _start_loop(loop):
asyncio.set_event_loop(loop)
loop.run_forever()
def conquer(self):
t = Thread(target=self._start_loop, args=(self.parse_loop,))
        t.daemon = True  # setDaemon() is deprecated
t.start()
try:
while 1:
if self.parse_queue.queue_size == 0:
time.sleep(10)
else:
html_content = self.parse_queue.dequeue()
if html_content:
asyncio.run_coroutine_threadsafe(self.parse_page(html_content), loop=self.parse_loop)
except KeyboardInterrupt:
self.mission_queue.serialize()
self.href_pool.serialize()
finally:
pass
class IncreasingParser(object):
def __init__(self, mission_queue, parse_queue, href_pool):
self.mission_queue = mission_queue
self.parse_queue = parse_queue
self.href_pool = href_pool
self.url_pattern = None
self.parse_loop = asyncio.new_event_loop()
def pre_process(self, response_object):
"""Method for processing the response content into content object,
which could be achieved by RegEx, BeautifulSoup or PyQuery."""
return response_object
    async def parse_page(self, response_content):
        content_object = self.pre_process(response_content)
        # Await both stages directly on parse_loop (see note in BaseParser).
        await self.extract_data(content_object)
        await self.extract_sub_href(content_object)
def go_forward(self, content_object) -> bool:
"""return whether go forward"""
raise NotImplementedError
def extract_sub_list(self, content_object) -> list:
"""return sub href list"""
raise NotImplementedError
async def extract_sub_href(self, content_object):
sub_mission_list = self.extract_sub_list(content_object)
if self.go_forward(content_object):
for mission in sub_mission_list:
if not self.href_pool.is_duplicate(mission.unique_tag):
self.mission_queue.enqueue(mission.serialize())
async def extract_data(self, content_object):
"""extract data from content"""
raise NotImplementedError
@staticmethod
def _start_loop(loop):
asyncio.set_event_loop(loop)
loop.run_forever()
def conquer(self):
t = Thread(target=self._start_loop, args=(self.parse_loop,))
        t.daemon = True  # setDaemon() is deprecated
t.start()
try:
while 1:
if self.parse_queue.queue_size == 0:
time.sleep(10)
else:
html_content = self.parse_queue.dequeue()
if html_content:
asyncio.run_coroutine_threadsafe(self.parse_page(html_content), loop=self.parse_loop)
except KeyboardInterrupt:
self.mission_queue.serialize()
self.href_pool.serialize()
finally:
pass
```
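A minimal subclass sketch showing the two hooks `BaseParser` leaves abstract; the BeautifulSoup pre-processing and the extracted field are illustrative:

```python
from bs4 import BeautifulSoup

class TitleParser(BaseParser):
    def pre_process(self, response_object):
        # Turn raw HTML into a queryable object.
        return BeautifulSoup(response_object, "html.parser")

    async def extract_data(self, content_object) -> None:
        title = content_object.title.string if content_object.title else ""
        print(title)  # a real parser would persist this instead of printing
```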
#### File: hannibal/spider/distribute_collector.py
```python
import aiohttp
import asyncio
from hannibal.util import TrickHelper, Mission
from collections import deque
from queue import Queue
from threading import Thread
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
class DistributeCollector(object):
_BEFORE_COLLECT_MIDDLEWARE_LIST = deque()
_ERROR_HANDLER_DICT = dict()
def __init__(self, mission_queue, parse_queue, href_pool, seed_mission=None, cache_size=3, *args, **kwargs):
self.mission_queue = mission_queue
self.parse_queue = parse_queue
self.href_pool = href_pool
if seed_mission:
self.mission_queue.enqueue(seed_mission.serialize())
self.trick_helper = TrickHelper()
self.collect_loop = asyncio.new_event_loop()
self.semaphore = asyncio.Semaphore(cache_size, loop=self.collect_loop)
self.client_session = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(verify_ssl=False, loop=self.collect_loop), loop=self.collect_loop)
self.collect_queue = Queue(maxsize=cache_size)
@staticmethod
def register_error_handler(code, handler, *args, **kwargs):
assert isinstance(code, int)
assert callable(handler)
DistributeCollector._ERROR_HANDLER_DICT[code] = handler
@staticmethod
def register_middleware(middleware, *args, **kwargs):
assert callable(middleware)
DistributeCollector._BEFORE_COLLECT_MIDDLEWARE_LIST.append(middleware)
async def collect(self, *args, **kwargs):
        async with self.semaphore:  # modern form; "with (await sem)" is deprecated
mission_str = self.collect_queue.get()
if isinstance(mission_str, bytes):
mission_str = mission_str.decode('utf-8')
mission = Mission.deserialize(mission_str)
if not self.href_pool.is_duplicate(mission.unique_tag):
await self._collect(mission)
def _insert_url(self, future):
processed_url = future.result()
if processed_url:
self.href_pool.insert(processed_url)
async def _collect(self, mission):
url = mission.url
headers = self.trick_helper.trick()
        async with self.client_session.request(mission.method, url,
                                               **{mission.data_type: mission.data, 'headers': headers}) as response:
            process_task = asyncio.ensure_future(self.process_response(response, mission), loop=self.collect_loop)
            process_task.add_done_callback(self._insert_url)
            # Await the task directly; wrapping the awaited result in
            # run_coroutine_threadsafe would pass None instead of a coroutine.
            await process_task
async def process_response(self, response, mission, *args, **kwargs):
status_code = int(response.status)
if 400 < status_code < 600:
handler = DistributeCollector._ERROR_HANDLER_DICT.get(status_code, None)
if handler:
handler(response, mission)
return None
else:
html_body = await response.text(encoding='utf-8')
self.parse_queue.enqueue(html_body)
return mission.unique_tag
def _pop_url(self) -> str:
try:
url = self.mission_queue.dequeue()
return url if url else ''
except Exception as e:
print(e)
return ''
@staticmethod
def _start_loop(loop):
asyncio.set_event_loop(loop)
loop.run_forever()
def conquer(self, limited=False):
try:
t = Thread(target=self._start_loop, args=(self.collect_loop,))
            t.daemon = True  # setDaemon() is deprecated
t.start()
while 1:
if not self.collect_queue.full():
url = self._pop_url()
if url:
self.collect_queue.put(url)
if limited and url == self.mission_queue.endpoint:
break
if url:
asyncio.run_coroutine_threadsafe(self.collect(), loop=self.collect_loop)
except KeyboardInterrupt:
self.mission_queue.serialize()
self.href_pool.serialize()
finally:
asyncio.run_coroutine_threadsafe(self.client_session.close(), loop=self.collect_loop)
```
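For illustration, how the error-handler hook above might be used; the handler body is a placeholder policy, not part of the original project:

```python
def on_service_unavailable(response, mission):
    # Placeholder: log and drop; a real handler might re-enqueue the mission.
    print(f"503 for {mission.url}")

DistributeCollector.register_error_handler(503, on_service_unavailable)
```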
{
"source": "jorgenmyrvold/tpk4128-python",
"score": 3
}
#### File: tpk4128-python/scripts/run_client_opencv.py
```python
from tpk4128.client import SocketClient
import time
import cv2
import numpy as np
def main():
client = SocketClient('10.0.0.7', 50007)
while True:
client.sendall(b'Hello World!')
        # Tip: len(img.tobytes()) gives the frame size (tostring() is deprecated)
        size, data = client.recv(2764800)  # 720*1280*3 bytes on the author's MacBook
if not data:
break
# Tip: img.dtype, img.shape
        img = np.frombuffer(data, dtype=np.uint8).reshape(720, 1280, 3)  # dimensions: 720 x 1280 x 3
cv2.imshow('img', img)
if cv2.waitKey(20) == 27: # Esc: 27
break
if __name__ == '__main__':
main()
```
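The hard-coded receive size is just the raw frame size; a quick check of where 2764800 comes from:

```python
import numpy as np

frame = np.zeros((720, 1280, 3), dtype=np.uint8)
assert len(frame.tobytes()) == 720 * 1280 * 3 == 2764800
```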
#### File: tpk4128-python/tpk4128/camera_opencv.py
```python
import numpy as np
import cv2 as cv
class Camera(object):
def __init__(self):
# Implement this constructor that opens a webcam and stores it in self._camera
self._camera = cv.VideoCapture(0)
if not self._camera.isOpened():
print("Not able to open camera")
exit()
def capture(self):
# Implement this function that grabs an image from the webcam and returns a numpy array
ret, img = self._camera.read()
if not ret:
print("Error reading from camera")
return img
def __del__(self):
# Implement this destructor. Remember to free the camera.
self._camera.release()
cv.destroyAllWindows()
if __name__ == "__main__":
cam = Camera()
while True:
img = cam.capture()
        print(len(img.tobytes()), img.dtype, img.shape)  # tobytes() replaces the deprecated tostring()
cv.imshow("image", img)
cv.waitKey(1)
```
{
"source": "jorgenorena/Metodos-Numericos",
"score": 3
}
#### File: jorgenorena/Metodos-Numericos/ejemplo.py
```python
def ejemplo1():
    print('A function')  # translated from Spanish: 'Una función'
def ejemplo2():
    print('Something else')  # translated from Spanish: 'Otra cosa'
return 1
```
{
"source": "Jorgen-P/ansible-helpscripts",
"score": 3
}
#### File: Jorgen-P/ansible-helpscripts/debconf.py
```python
import sys
def main():
"""Convert debconf-get-selections output on stdin to Ansible YAML debconf tasks on stdout"""
contains_passwords = False
contains_quotes = False
    for line in sys.stdin:
        w = line.rstrip("\n").split("\t", 4)
# Two spaces indentation
ind = " "
if len(w) == 4:
print(f"- name: debconf {w[1]}")
print(ind*1 + "debconf:" )
print(ind*2 + f"name: {w[0]}")
print(ind*2 + f"question: {w[1]}")
print(ind*2 + f"vtype: {w[2]}")
print(ind*2 + f"value: \'{w[3]}\'")
# Don't log passwords
if w[2] == "password":
print(ind*1 + "no_log: True")
contains_passwords = True
# Warn if the value contains single quotes
# Print value to make searching reasonable as output will contain a lot of single quotes
if "'" in w[3]:
print(f"Warning: Input contains single quotes which break quoting: {w[3]}", file=sys.stderr)
contains_quotes = True
if contains_passwords:
print("Input contains passwords, consider converting those to variables backed by ansible vault",
file=sys.stderr)
if contains_quotes:
print("Input contains single quotes which break quoting, please fix the generated YAML", file=sys.stderr)
if __name__ == '__main__':
main()
```
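An illustrative run, feeding `main()` a made-up selection line through a patched stdin (assumes the function above is importable):

```python
import io
import sys
from unittest import mock

sample = "openssh-server\topenssh-server/permit-root-login\tboolean\tfalse\n"
with mock.patch.object(sys, "stdin", io.StringIO(sample)):
    main()  # prints one Ansible debconf task for the line above
```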
{
"source": "jorgenriseth/vox-populi",
"score": 3
}
#### File: jorgenriseth/vox-populi/corpus_creation.py
```python
import os
import sys
import tweepy
import json
class CorpusCreator:
"""
Class for creating corpus structure and loading tweets
using tweepy.
Structure:
-> corpus
---> party1
-----> user11
-----> user12
-----> user13
---> party2
-----> user21
-----> user22
.
.
.
"""
def __init__(self, user_dict=None, rel_path='./',
rate_limit_wait=True):
# Get an API-object authorized from 'credentials.txt'
auth = get_tweet_auth()
self.api = tweepy.API(auth,
wait_on_rate_limit=rate_limit_wait,
wait_on_rate_limit_notify=rate_limit_wait)
self.root = rel_path + 'corpus/'
# Load mp-file from directory
        assert user_dict is None or isinstance(user_dict, (str, dict)), "user_dict must be a path, a dict, or None"
if user_dict is None:
user_dict = 'mps.json'
if type(user_dict) is str:
with open(user_dict) as f:
self.users = json.load(f)
elif type(user_dict) is dict:
self.users = user_dict
# Create root filesystem
        try:
            os.mkdir(self.root)
            print('Directory "corpus" created.')
            print()
        except FileExistsError:
            print('Directory "corpus" already exists.')
            print()
def load_tweets(self, max_items=10000, user=None):
"""
For all users in self.users, get [max_items] tweets and
save each to separate files.
"""
for name, info in self.users.items():
try:
os.mkdir(self.root + info['party'].lower().replace(' ', '_'))
except FileExistsError:
pass
filepath = self.root + info['party'].lower().replace(' ', '_')
filepath = filepath + '/' + name.lower().replace(' ', '')
try:
print(f'Reading tweets from {name}')
user = info['screen_name']
curs = tweepy.Cursor(self.api.user_timeline,
screen_name=user,
count=200,
tweet_mode="extended"
).items(max_items)
with open(filepath + '.jsonl', 'w') as f:
for status in curs:
tweet = status._json
json_dump_line(tweet, f)
except tweepy.TweepError as exc:
print(exc)
os.remove(filepath + '.jsonl')
def get_tweet_auth(auth_file='credentials.txt'):
"""
Get tweepy oauth object given a credentials-file, formatted
as a nltk.twitter-creds file.
"""
keys = []
# Open credentials-file
with open(auth_file, 'r') as f:
for line in f:
# Read only key/token
token = line.split('=')[-1].rstrip('\n')
            # Add token to keys-list
            if token != '':  # "is not ''" tested identity, not equality
                keys.append(token)
auth = tweepy.OAuthHandler(*keys[:2])
auth.set_access_token(*keys[2:])
return auth
def json_dump_line(json_object, file_object):
"""
    Dumps a dictionary json_object to file_object, adding
a trailing newline, hence creating a json line format.
"""
json.dump(json_object, file_object)
file_object.write('\n')
def rm_empty_json_in_path(path):
"""
Browses through corpus-files and removes any user.json-files which are
empty for various reasons.
"""
assert os.path.isdir(path), "[path] not a valid directory"
# Ensure directories are given with ending '/' for recursion
if path[-1] != '/':
path += '/'
print('Browsing "' + path + '"')
for f in os.listdir(path):
filepath = path + f
if os.path.isfile(filepath) and '.jsonl' in filepath:
try:
if os.path.getsize(filepath) == 0:
print('Removing ' + filepath)
os.remove(filepath)
# Shouldn't happen, but just to make sure.
except OSError as e:
print(e)
pass
elif os.path.isdir(filepath):
# Browse one dir deeper
rm_empty_json_in_path(path + f + '/')
if __name__ == '__main__':
number_of_tweets = int(input("Number of tweets per user:"))
# Ensure that the argument is a positive integer.
assert number_of_tweets > 0, "Number of tweets must be a positive integer"
# Create MP-list and run corpuscreator
assert os.path.isfile('mps.json'), "Run mp_dict_creator.py first."
corpus_creator = CorpusCreator(user_dict='mps.json')
corpus_creator.load_tweets(max_items=number_of_tweets)
rm_empty_json_in_path('corpus/')
```
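A minimal sketch of reading the JSON Lines output back; the `corpus/` tree is the one created above, and the concrete path is purely hypothetical:
```python
import json
# Any user file produced by CorpusCreator.load_tweets works here;
# 'corpus/labour/keirstarmer.jsonl' is an illustrative path only.
with open('corpus/labour/keirstarmer.jsonl') as f:
    tweets = [json.loads(line) for line in f]
# tweet_mode="extended" stores the tweet text under 'full_text'
print(len(tweets), tweets[0]['full_text'][:80])
```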
#### File: jorgenriseth/vox-populi/utils.py
```python
import tweepy
import json
import requests
import matplotlib.pyplot as plt
from bs4 import BeautifulSoup
"""
Printing Macros
"""
class print_color:
PURPLE= '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
GREY = '\033[97m'
BLACK = '\033[98m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
party_color_map = {
'Conservative': print_color.BLUE,
'Democratic Unionist Party': print_color.YELLOW,
'Green Party': print_color.GREEN,
'Independent': print_color.GREY,
'Labour': print_color.RED,
'Liberal Democrat': print_color.PURPLE,
'Plaid Cymru': print_color.DARKCYAN,
'Scottish National Party': print_color.GREY,
'Sinn Fein': print_color.CYAN,
'The Independent Group': print_color.BLACK
}
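# Illustrative usage (not part of the original module): wrap a party name in
# its ANSI color code and reset the terminal style afterwards, e.g.
#   print(party_color_map['Labour'] + 'Labour' + print_color.END)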
def get_tweet_auth(auth_file='credentials.txt'):
"""
Get tweepy oauth object given a credentials-file, formatted
as a nltk.twitter-creds file.
"""
keys = []
# Open credentials-file
with open(auth_file, 'r') as f:
for line in f:
            # Read only the key/token part after '='
            token = line.split('=')[-1].rstrip('\n')
            # Add non-empty tokens to the keys list
            if token != '':
                keys.append(token)
auth = tweepy.OAuthHandler(*keys[:2])
auth.set_access_token(*keys[2:])
return auth
def json_dump_line(json_object, file_object):
"""
    Dumps a dictionary json_object to file_object, adding
    a trailing newline, hence creating the JSON Lines format.
"""
json.dump(json_object, file_object)
file_object.write('\n')
def user_id_correction(erratafile='user-errata.txt',
user_file='mps.json'):
"""
Fixes errors in user_file, using information stored in erratafile
"""
    # Create a dictionary mapping errata[wrong-id] = right-id
with open(erratafile, 'r') as f:
errata = {}
for line in f:
line = line.rstrip('\n')
errata[line.split('=')[0]] = line.split('=')[1]
    # Load the dictionary to be corrected
with open(user_file, 'r') as f:
user_dict = json.load(f)
# Correct the entries in the user-file
for wrong_id, right_id in errata.items():
for name, info in user_dict.items():
if info["screen_name"] == wrong_id:
print()
print('Correction')
print('Old: ', user_dict[name])
user_dict[name]["screen_name"] = right_id
user_dict[name]["url"] = info["url"][:-len(wrong_id)] + right_id
print('New: ', user_dict[name])
print()
# Save corrected dictionary to json.
with open(user_file, 'w') as f:
json.dump(user_dict, f)
def create_user_list(filename='mps.json', errata='user-errata.txt'):
"""
    Retrieves the list of MPs on Twitter, and stores their username,
    political party, and Twitter profile URL in a JSON file,
    keyed by their full names.
"""
url = "https://www.mpsontwitter.co.uk/list"
data = requests.get(url)
html = BeautifulSoup(data.text, 'html.parser')
table = html.select("tbody", id='mp_wrapper')[1]
mp_dict = {}
for line in table.select('tr'):
name = line.select('td')[2].get_text().strip()
party = ' '.join(line.td['class'])
twitter_id = line.a.get_text()[1:]
url = line.a['href']
mp_dict[name] = {
"party": party,
"screen_name": twitter_id,
"url": "https://twitter.com/" + twitter_id
}
with open(filename, 'w') as f:
json.dump(mp_dict, f)
if errata is not None:
user_id_correction(erratafile=errata, user_file=filename)
print(f'MPs on Twitter stored in {filename}')
def visualize(df, color_by='label', num_samples='all', marker_labels=True,
ax=None, savename=None):
"""
2D projection plot of all (or a subset of) points in df after
performed clustering. The points color is specified by the value
in the "color_by"-argument.
If num_samples is an integer, it will only plot num_samples data points
randomly sampled from the available data. This might be useful for a high
number of data points.
marker_labels specifies whether the points should be plotted alongside the
points index value. (typically user or party).
If savename is specified, the resulting figure is saved to disk.
"""
    if num_samples != 'all':
groups = df.sample(num_samples).groupby(color_by)
else:
groups = df.groupby(color_by)
if ax is None:
fig, ax = plt.subplots(figsize=(16, 12)) # set size
ax.margins(0.05) # Optional, just adds 5% padding to the autoscaling
    # Loop through groups and plot by color
    for color, group in groups:
        ax.plot(group.posx, group.posy, marker='o', ms=6,
                ls='', label=color)
        # Annotate every point of this group with its index value
        # (typically user or party), not only the last group plotted
        if marker_labels:
            for i in range(len(group)):
                ax.text(group.iloc[i]['posx'] - 1e-2, group.iloc[i]['posy'] + 8e-3,
                        group.index[i], size=8)
    # Configure axes
    ax.set_aspect('auto')
    ax.tick_params(
        axis='x',           # changes apply to the x-axis
        which='both',       # both major and minor ticks are affected
        bottom=False,       # ticks along the bottom edge are off
        top=False,          # ticks along the top edge are off
        labelbottom=False)
    ax.tick_params(
        axis='y',           # changes apply to the y-axis
        which='both',       # both major and minor ticks are affected
        left=False,         # ticks along the left edge are off
        right=False,        # ticks along the right edge are off
        labelleft=False)
    ax.legend(numpoints=1, loc=2)
if savename is not None:
plt.savefig(savename, dpi=200)
return ax
def cluster_information(df, vectorizer, clusterer, num_words=15,
show_users=True):
"""
Print clusters, and most used features per cluster.
"""
    # Ensure that the model is already fitted
assert hasattr(clusterer, 'cluster_centers_'), "Need to fit clusterer first."
terms = vectorizer.get_feature_names()
order_centroids = clusterer.cluster_centers_.argsort()[:, ::-1]
num_clusters = clusterer.get_params()['n_clusters']
for i in range(num_clusters):
print()
print('=' * 80)
print(print_color.BOLD + "Cluster %d:" % i + print_color.END,
end='')
        for ind in order_centroids[i, :num_words]:
print('%s, ' % terms[ind], end='')
print()
if show_users:
print('-' * 50)
for user in df.loc[df['label'] == i].index:
party = df.loc[user]['party']
print(user + '(' + party_color_map[party] + party + '), '
+ print_color.END, end='')
print()
print('=' * 80)
``` |
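A quick sketch of driving `visualize` with a toy DataFrame; the column names `posx`, `posy`, `label` and `party` are what the functions above expect, while the import path, index and values are made up for illustration:
```python
import pandas as pd
from utils import visualize  # assumed module name for the file above
df = pd.DataFrame({
    'posx': [0.10, 0.40, 0.80],
    'posy': [0.20, 0.90, 0.50],
    'label': [0, 1, 0],
    'party': ['Labour', 'Conservative', 'Labour'],
}, index=['user_a', 'user_b', 'user_c'])
# Colors the three points by cluster label and writes the figure to disk.
visualize(df, color_by='label', marker_labels=True, savename='clusters.png')
```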
{
"source": "jorgenschaefer/monads-for-normal-programmers",
"score": 3
} |
#### File: monads/state/monad.py
```python
from functools import wraps
class Monad(object):
def bind(self, method, args, kwargs):
return method(self, *args, **kwargs)
def bound(method):
@wraps(method)
def bound_method(self, *args, **kwargs):
result = self.bind(method, args, kwargs)
assert(isinstance(result, Monad))
return result
return bound_method
```
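In isolation, `bound` just routes a method call through the instance's `bind` and asserts the result is still a `Monad`. A contrived sketch of that contract (an assumption for illustration, not code from the repo):
```python
from monads.state.monad import Monad, bound
class Box(Monad):
    def __init__(self, value=None):
        self.value = value
    @bound
    def put(self, value):
        # The default bind calls this method directly, so it must return a Monad.
        return Box(value)
assert isinstance(Box().put(42), Box)
```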
#### File: monads/state/state1.py
```python
from monads.state.monad import Monad
class StateMonad(Monad):
def __init__(self, function=lambda x: x):
self.function = function
def run(self, state):
return self.function(state)
class Game(StateMonad):
def move(self, char):
def transformer(state):
on, score = self.function(state)
if char == 'a' and on:
return (on, score + 1)
elif char == 'b' and on:
return (on, score - 1)
elif char == 'c':
return (not on, score)
else:
return (on, score)
return Game(transformer)
```
#### File: monads/state/state2.py
```python
from monads.state.monad import Monad, bound
class StateMonad(Monad):
def __init__(self, function=lambda x: x):
self.function = function
def run(self, state):
return self.function(state)
class Game(StateMonad):
def bind(self, method, args, kwargs):
def transformer(old_state):
current_state = self.run(old_state)
new_state = method(self, current_state,
*args, **kwargs)
return new_state
return Game(transformer)
@bound
def move(self, state, char):
on, score = state
if char == 'a' and on:
return (on, score + 1)
elif char == 'b' and on:
return (on, score - 1)
elif char == 'c':
return (not on, score)
else:
return (on, score)
``` |
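A usage sketch (assumed, not from the repo): the state is an `(on, score)` tuple threaded through chained moves, and nothing executes until `.run` supplies the initial state:
```python
from monads.state.state2 import Game
game = Game().move('a').move('a').move('c').move('b')
# Both 'a's score while the game is on, 'c' toggles it off, 'b' is then ignored.
print(game.run((True, 0)))  # -> (False, 2)
```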
{
"source": "jorgensd/asimov-contact",
"score": 2
} |
#### File: python/demos/gradual_loading.py
```python
import argparse
import numpy as np
from dolfinx.fem import Function, VectorFunctionSpace
from dolfinx.io import XDMFFile
from dolfinx.mesh import locate_entities_boundary, meshtags
from mpi4py import MPI
from dolfinx_contact import update_geometry
from dolfinx_contact.meshing import (convert_mesh, create_circle_circle_mesh,
create_circle_plane_mesh,
create_sphere_plane_mesh)
from dolfinx_contact.one_sided.nitsche_rigid_surface_custom import \
nitsche_rigid_surface_custom
if __name__ == "__main__":
desc = "Nitsche's method with rigid surface using custom assemblers and apply gradual loading in non-linear solve"
parser = argparse.ArgumentParser(description=desc,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--theta", default=1, type=np.float64, dest="theta",
help="Theta parameter for Nitsche, 1 symmetric, -1 skew symmetric, 0 Penalty-like")
parser.add_argument("--gamma", default=10, type=np.float64, dest="gamma",
help="Coercivity/Stabilization parameter for Nitsche condition")
_solve = parser.add_mutually_exclusive_group(required=False)
_solve.add_argument('--linear', dest='linear_solver', action='store_true',
help="Use linear solver", default=False)
_3D = parser.add_mutually_exclusive_group(required=False)
_3D.add_argument('--3D', dest='threed', action='store_true',
help="Use 3D mesh", default=False)
_curved = parser.add_mutually_exclusive_group(required=False)
_curved.add_argument('--curved', dest='curved', action='store_true',
help="Use curved rigid surface", default=False)
_strain = parser.add_mutually_exclusive_group(required=False)
_strain.add_argument('--strain', dest='plane_strain', action='store_true',
help="Use plane strain formulation", default=False)
_dirichlet = parser.add_mutually_exclusive_group(required=False)
_dirichlet.add_argument('--dirichlet', dest='dirichlet', action='store_true',
help="Use strong Dirichlet formulation", default=False)
_E = parser.add_argument("--E", default=1e3, type=np.float64, dest="E",
help="Youngs modulus of material")
_nu = parser.add_argument(
"--nu", default=0.1, type=np.float64, dest="nu", help="Poisson's ratio")
_disp = parser.add_argument("--disp", default=0.2, type=np.float64, dest="disp",
help="Displacement BC in negative y direction")
_nload_steps = parser.add_argument("--load_steps", default=1, type=np.int32, dest="nload_steps",
help="Number of steps for gradual loading")
    # Parse input arguments or set to default values
args = parser.parse_args()
# Current formulation uses unilateral contact
nitsche_parameters = {"gamma": args.gamma, "theta": args.theta}
nitsche_bc = not args.dirichlet
physical_parameters = {"E": args.E, "nu": args.nu, "strain": args.plane_strain}
vertical_displacement = -args.disp
top_value = 1
threed = args.threed
bottom_value = 2
nload_steps = args.nload_steps
curved = args.curved
# Load mesh and create identifier functions for the top (Displacement condition)
# and the bottom (contact condition)
if threed:
fname = "sphere"
create_sphere_plane_mesh(filename=f"{fname}.msh")
convert_mesh(fname, fname, "tetra")
convert_mesh(f"{fname}", f"{fname}_facets", "triangle")
with XDMFFile(MPI.COMM_WORLD, f"{fname}.xdmf", "r") as xdmf:
mesh = xdmf.read_mesh(name="Grid")
tdim = mesh.topology.dim
mesh.topology.create_connectivity(tdim - 1, 0)
mesh.topology.create_connectivity(tdim - 1, tdim)
with XDMFFile(MPI.COMM_WORLD, f"{fname}_facets.xdmf", "r") as xdmf:
facet_marker = xdmf.read_meshtags(mesh, name="Grid")
top_value = 2
bottom_value = 1
surface_value = 8
surface_bottom = 7
else:
if curved:
fname = "two_disks"
create_circle_circle_mesh(filename=f"{fname}.msh")
convert_mesh(fname, fname, "triangle", prune_z=True)
convert_mesh(f"{fname}", f"{fname}_facets", "line", prune_z=True)
with XDMFFile(MPI.COMM_WORLD, f"{fname}.xdmf", "r") as xdmf:
mesh = xdmf.read_mesh(name="Grid")
tdim = mesh.topology.dim
mesh.topology.create_connectivity(tdim - 1, 0)
mesh.topology.create_connectivity(tdim - 1, tdim)
def top1(x):
return x[1] > 0.55
def bottom1(x):
return np.logical_and(x[1] < 0.5, x[1] > 0.15)
def top2(x):
return np.logical_and(x[1] > -0.3, x[1] < 0.15)
def bottom2(x):
return x[1] < -0.35
top_value = 1
bottom_value = 2
surface_value = 3
surface_bottom = 4
# Create meshtag for top and bottom markers
top_facets1 = locate_entities_boundary(mesh, tdim - 1, top1)
bottom_facets1 = locate_entities_boundary(mesh, tdim - 1, bottom1)
top_facets2 = locate_entities_boundary(mesh, tdim - 1, top2)
bottom_facets2 = locate_entities_boundary(mesh, tdim - 1, bottom2)
top_values = np.full(len(top_facets1), top_value, dtype=np.int32)
bottom_values = np.full(len(bottom_facets1), bottom_value, dtype=np.int32)
surface_values = np.full(len(top_facets2), surface_value, dtype=np.int32)
sbottom_values = np.full(len(bottom_facets2), surface_bottom, dtype=np.int32)
indices = np.concatenate([top_facets1, bottom_facets1, top_facets2, bottom_facets2])
values = np.hstack([top_values, bottom_values, surface_values, sbottom_values])
sorted_facets = np.argsort(indices)
facet_marker = meshtags(mesh, tdim - 1, indices[sorted_facets], values[sorted_facets])
else:
fname = "twomeshes"
create_circle_plane_mesh(filename=f"{fname}.msh")
convert_mesh(fname, fname, "triangle", prune_z=True)
convert_mesh(f"{fname}", f"{fname}_facets", "line", prune_z=True)
with XDMFFile(MPI.COMM_WORLD, f"{fname}.xdmf", "r") as xdmf:
mesh = xdmf.read_mesh(name="Grid")
tdim = mesh.topology.dim
mesh.topology.create_connectivity(tdim - 1, 0)
mesh.topology.create_connectivity(tdim - 1, tdim)
with XDMFFile(MPI.COMM_WORLD, f"{fname}_facets.xdmf", "r") as xdmf:
facet_marker = xdmf.read_meshtags(mesh, name="Grid")
top_value = 2
bottom_value = 4
surface_value = 9
surface_bottom = 7
def top(x):
return x[1] > 0.5
def bottom(x):
return np.logical_and(x[1] < 0.45, x[1] > 0.15)
top_value = 1
bottom_value = 2
surface_value = 3
surface_bottom = 4
# Create meshtag for top and bottom markers
top_facets1 = locate_entities_boundary(mesh, tdim - 1, top)
bottom_facets1 = locate_entities_boundary(mesh, tdim - 1, bottom)
top_facets2 = locate_entities_boundary(mesh, tdim - 1, lambda x: np.isclose(x[1], 0.1))
bottom_facets2 = locate_entities_boundary(mesh, tdim - 1, lambda x: np.isclose(x[1], 0.0))
top_values = np.full(len(top_facets1), top_value, dtype=np.int32)
bottom_values = np.full(len(bottom_facets1), bottom_value, dtype=np.int32)
surface_values = np.full(len(top_facets2), surface_value, dtype=np.int32)
sbottom_values = np.full(len(bottom_facets2), surface_bottom, dtype=np.int32)
indices = np.concatenate([top_facets1, bottom_facets1, top_facets2, bottom_facets2])
values = np.hstack([top_values, bottom_values, surface_values, sbottom_values])
sorted_facets = np.argsort(indices)
facet_marker = meshtags(mesh, tdim - 1, indices[sorted_facets], values[sorted_facets])
# Solver options
newton_options = {"relaxation_parameter": 1.0}
# petsc_options = {"ksp_type": "preonly", "pc_type": "lu"}
petsc_options = {"ksp_type": "cg", "pc_type": "gamg", "rtol": 1e-6, "pc_gamg_coarse_eq_limit": 1000,
"mg_levels_ksp_type": "chebyshev", "mg_levels_pc_type": "jacobi",
"mg_levels_esteig_ksp_type": "cg", "matptap_via": "scalable", "ksp_view": None}
# Pack mesh data for Nitsche solver
mesh_data = (facet_marker, top_value, bottom_value, surface_value, surface_bottom)
# Solve contact problem using Nitsche's method
load_increment = vertical_displacement / nload_steps
# Define function space for problem
V = VectorFunctionSpace(mesh, ("CG", 1))
u1 = None
# Data to be stored on the unperturb domain at the end of the simulation
u = Function(V)
u.x.array[:] = np.zeros(u.x.array[:].shape)
geometry = mesh.geometry.x[:].copy()
for j in range(nload_steps):
displacement = load_increment
# Solve contact problem using Nitsche's method
u1 = nitsche_rigid_surface_custom(mesh=mesh, mesh_data=mesh_data, physical_parameters=physical_parameters,
nitsche_parameters=nitsche_parameters, vertical_displacement=displacement,
nitsche_bc=True, quadrature_degree=3, petsc_options=petsc_options,
newton_options=newton_options)
with XDMFFile(mesh.comm, f"results/u_custom_{j}.xdmf", "w") as xdmf:
xdmf.write_mesh(mesh)
u1.name = "u"
xdmf.write_function(u1)
# Perturb mesh with solution displacement
update_geometry(u1._cpp_object, mesh)
# Accumulate displacements
u.x.array[:] += u1.x.array[:]
# Reset mesh to initial state and write accumulated solution
mesh.geometry.x[:] = geometry
with XDMFFile(mesh.comm, "results/u_custom_total.xdmf", "w") as xdmf:
xdmf.write_mesh(mesh)
u.name = "u"
xdmf.write_function(u)
```
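The facet-marking pattern used twice above (collect facet indices from several locators, sort them, then build a single `meshtags` object) in isolation, on a built-in unit square so the sketch stays self-contained (assuming a DOLFINx version that provides `create_unit_square`):
```python
import numpy as np
from dolfinx.mesh import create_unit_square, locate_entities_boundary, meshtags
from mpi4py import MPI
mesh = create_unit_square(MPI.COMM_WORLD, 8, 8)
tdim = mesh.topology.dim
bottom = locate_entities_boundary(mesh, tdim - 1, lambda x: np.isclose(x[1], 0.0))
top = locate_entities_boundary(mesh, tdim - 1, lambda x: np.isclose(x[1], 1.0))
indices = np.concatenate([bottom, top])
values = np.hstack([np.full(len(bottom), 1, dtype=np.int32),
                    np.full(len(top), 2, dtype=np.int32)])
order = np.argsort(indices)  # meshtags requires sorted entity indices
facet_marker = meshtags(mesh, tdim - 1, indices[order], values[order])
```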
#### File: dolfinx_contact/meshing/contact_meshes.py
```python
import gmsh
import numpy as np
from dolfinx.io import (XDMFFile, cell_perm_gmsh, extract_gmsh_geometry,
extract_gmsh_topology_and_markers, ufl_mesh_from_gmsh)
from dolfinx.mesh import CellType, create_mesh
from mpi4py import MPI
__all__ = ["create_circle_plane_mesh", "create_circle_circle_mesh", "create_box_mesh_2D",
"create_box_mesh_3D", "create_sphere_plane_mesh", "create_sphere_sphere_mesh",
"create_cylinder_cylinder_mesh"]
def create_circle_plane_mesh(filename: str, quads: bool = False, res=0.1, order: int = 1):
"""
    Create a circular mesh, with center at (0.5, 0.5, 0) and radius 0.3, and a box [0,1]x[0,0.1]
"""
center = [0.5, 0.5, 0]
r = 0.3
angle = np.pi / 4
L = 1
H = 0.1
gmsh.initialize()
if MPI.COMM_WORLD.rank == 0:
# Create circular mesh (divided into 4 segments)
c = gmsh.model.occ.addPoint(center[0], center[1], center[2])
# Add 4 points on circle (clockwise, starting in top left)
angles = [angle, -angle, np.pi + angle, np.pi - angle]
c_points = [gmsh.model.occ.addPoint(center[0] + r * np.cos(angle), center[1]
+ r * np.sin(angle), center[2]) for angle in angles]
arcs = [gmsh.model.occ.addCircleArc(
c_points[i - 1], c, c_points[i]) for i in range(len(c_points))]
curve = gmsh.model.occ.addCurveLoop(arcs)
gmsh.model.occ.synchronize()
surface = gmsh.model.occ.addPlaneSurface([curve])
# Create box
p0 = gmsh.model.occ.addPoint(0, 0, 0)
p1 = gmsh.model.occ.addPoint(L, 0, 0)
p2 = gmsh.model.occ.addPoint(L, H, 0)
p3 = gmsh.model.occ.addPoint(0, H, 0)
ps = [p0, p1, p2, p3]
lines = [gmsh.model.occ.addLine(ps[i - 1], ps[i]) for i in range(len(ps))]
curve2 = gmsh.model.occ.addCurveLoop(lines)
surface2 = gmsh.model.occ.addPlaneSurface([curve, curve2])
# Synchronize and create physical tags
gmsh.model.occ.synchronize()
gmsh.model.addPhysicalGroup(2, [surface])
bndry = gmsh.model.getBoundary([(2, surface)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry]
gmsh.model.addPhysicalGroup(2, [surface2], 2)
bndry2 = gmsh.model.getBoundary([(2, surface2)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry2]
gmsh.model.mesh.field.add("Distance", 1)
gmsh.model.mesh.field.setNumbers(1, "NodesList", [c])
gmsh.model.mesh.field.add("Threshold", 2)
gmsh.model.mesh.field.setNumber(2, "IField", 1)
gmsh.model.mesh.field.setNumber(2, "LcMin", res)
gmsh.model.mesh.field.setNumber(2, "LcMax", 2 * res)
gmsh.model.mesh.field.setNumber(2, "DistMin", 0.3)
gmsh.model.mesh.field.setNumber(2, "DistMax", 0.6)
if quads:
gmsh.option.setNumber("Mesh.RecombinationAlgorithm", 8)
gmsh.option.setNumber("Mesh.RecombineAll", 2)
gmsh.option.setNumber("Mesh.SubdivisionAlgorithm", 1)
gmsh.model.mesh.field.setAsBackgroundMesh(2)
gmsh.model.mesh.generate(2)
gmsh.model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_circle_circle_mesh(filename: str, quads: bool = False, res: float = 0.1, order: int = 1):
"""
Create two circular meshes, with radii 0.3 and 0.6 with centers (0.5,0.5) and (0.5, -0.5)
"""
center = [0.5, 0.5, 0]
r = 0.3
angle = np.pi / 4
gmsh.initialize()
if MPI.COMM_WORLD.rank == 0:
# Create circular mesh (divided into 4 segments)
c = gmsh.model.occ.addPoint(center[0], center[1], center[2])
# Add 4 points on circle (clockwise, starting in top left)
angles = [angle, -angle, np.pi + angle, np.pi - angle]
c_points = [gmsh.model.occ.addPoint(center[0] + r * np.cos(angle), center[1]
+ r * np.sin(angle), center[2]) for angle in angles]
arcs = [gmsh.model.occ.addCircleArc(
c_points[i - 1], c, c_points[i]) for i in range(len(c_points))]
curve = gmsh.model.occ.addCurveLoop(arcs)
gmsh.model.occ.synchronize()
surface = gmsh.model.occ.addPlaneSurface([curve])
# Create 2nd circular mesh (divided into 4 segments)
center2 = [0.5, -0.5, 0]
c2 = gmsh.model.occ.addPoint(center2[0], center2[1], center2[2])
# Add 4 points on circle (clockwise, starting in top left)
c_points2 = [gmsh.model.occ.addPoint(center2[0] + 2 * r * np.cos(angle), center2[1]
+ 2 * r * np.sin(angle), center2[2]) for angle in angles]
arcs2 = [gmsh.model.occ.addCircleArc(
c_points2[i - 1], c2, c_points2[i]) for i in range(len(c_points2))]
curve2 = gmsh.model.occ.addCurveLoop(arcs2)
gmsh.model.occ.synchronize()
surface2 = gmsh.model.occ.addPlaneSurface([curve, curve2])
# Synchronize and create physical tags
gmsh.model.occ.addPoint(0.5, 0.2, 0, tag=17)
gmsh.model.occ.synchronize()
gmsh.model.addPhysicalGroup(2, [surface])
bndry = gmsh.model.getBoundary([(2, surface)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry]
gmsh.model.addPhysicalGroup(2, [surface2], 2)
bndry2 = gmsh.model.getBoundary([(2, surface2)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry2]
gmsh.model.mesh.field.add("Distance", 1)
gmsh.model.mesh.field.setNumbers(1, "NodesList", [17])
gmsh.model.mesh.field.add("Threshold", 2)
gmsh.model.mesh.field.setNumber(2, "IField", 1)
gmsh.model.mesh.field.setNumber(2, "LcMin", res)
gmsh.model.mesh.field.setNumber(2, "LcMax", 3 * res)
gmsh.model.mesh.field.setNumber(2, "DistMin", 0.3)
gmsh.model.mesh.field.setNumber(2, "DistMax", 0.6)
if quads:
gmsh.option.setNumber("Mesh.RecombinationAlgorithm", 8)
gmsh.option.setNumber("Mesh.RecombineAll", 2)
gmsh.option.setNumber("Mesh.SubdivisionAlgorithm", 1)
gmsh.model.mesh.field.setAsBackgroundMesh(2)
gmsh.model.mesh.generate(2)
gmsh.model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_box_mesh_2D(filename: str, quads: bool = False, res=0.1, order: int = 1):
"""
Create two boxes, one slightly skewed
"""
L = 0.5
H = 0.5
disp = -0.6
delta = 0.1
gmsh.initialize()
if MPI.COMM_WORLD.rank == 0:
gmsh.option.setNumber("Mesh.CharacteristicLengthFactor", res)
# Create box
p0 = gmsh.model.occ.addPoint(-delta, 0, 0)
p1 = gmsh.model.occ.addPoint(L - delta, delta, 0)
p2 = gmsh.model.occ.addPoint(L - delta, H + delta, 0)
p3 = gmsh.model.occ.addPoint(-delta, H, 0)
ps = [p0, p1, p2, p3]
lines = [gmsh.model.occ.addLine(ps[i - 1], ps[i]) for i in range(len(ps))]
curve = gmsh.model.occ.addCurveLoop(lines)
surface = gmsh.model.occ.addPlaneSurface([curve])
# Create box
p4 = gmsh.model.occ.addPoint(0, 0 + disp, 0)
p5 = gmsh.model.occ.addPoint(L, 0 + disp, 0)
p6 = gmsh.model.occ.addPoint(L, H + disp, 0)
p7 = gmsh.model.occ.addPoint(0, H + disp, 0)
ps2 = [p4, p5, p6, p7]
lines2 = [gmsh.model.occ.addLine(ps2[i - 1], ps2[i]) for i in range(len(ps2))]
curve2 = gmsh.model.occ.addCurveLoop(lines2)
surface2 = gmsh.model.occ.addPlaneSurface([curve2])
gmsh.model.occ.synchronize()
res = 0.1
# Set mesh sizes on the points from the surface we are extruding
top_nodes = gmsh.model.getBoundary([(2, surface)], recursive=True, oriented=False)
gmsh.model.occ.mesh.setSize(top_nodes, res)
bottom_nodes = gmsh.model.getBoundary([(2, surface2)], recursive=True, oriented=False)
gmsh.model.occ.mesh.setSize(bottom_nodes, 2 * res)
# Synchronize and create physical tags
gmsh.model.occ.synchronize()
gmsh.model.addPhysicalGroup(2, [surface])
bndry = gmsh.model.getBoundary([(2, surface)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry]
gmsh.model.addPhysicalGroup(2, [surface2], 2)
bndry2 = gmsh.model.getBoundary([(2, surface2)], oriented=False)
[gmsh.model.addPhysicalGroup(b[0], [b[1]]) for b in bndry2]
if quads:
gmsh.option.setNumber("Mesh.RecombinationAlgorithm", 8)
gmsh.option.setNumber("Mesh.RecombineAll", 2)
gmsh.option.setNumber("Mesh.SubdivisionAlgorithm", 1)
gmsh.model.mesh.generate(2)
gmsh.model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_box_mesh_3D(filename: str, simplex: bool = True, res=0.1, order: int = 1):
"""
Create two boxes lying directly over eachother with a gap in between"""
L = 0.5
H = 0.5
W = 0.5
disp = -0.6
gmsh.initialize()
model = gmsh.model
if MPI.COMM_WORLD.rank == 0:
# Create box
if simplex:
model.occ.add_box(0, 0, 0, L, H, W)
model.occ.add_box(0, 0, disp, L, H, W)
model.occ.synchronize()
else:
square1 = model.occ.add_rectangle(0, 0, 0, L, H)
square2 = model.occ.add_rectangle(0, 0, disp, L, H)
model.occ.extrude([(2, square1)], 0, 0, H, numElements=[5], recombine=True)
model.occ.extrude([(2, square2)], 0, 0, H, numElements=[2], recombine=True)
model.occ.synchronize()
volumes = model.getEntities(3)
model.mesh.field.add("Box", 1)
model.mesh.field.setNumber(1, "VIn", res / 5.)
model.mesh.field.setNumber(1, "VOut", res)
model.mesh.field.setNumber(1, "XMin", 0)
model.mesh.field.setNumber(1, "XMax", L)
model.mesh.field.setNumber(1, "YMin", 0)
model.mesh.field.setNumber(1, "YMax", H)
model.mesh.field.setNumber(1, "ZMin", 0)
model.mesh.field.setNumber(1, "ZMax", W)
model.mesh.field.setAsBackgroundMesh(1)
# Synchronize and create physical tags
model.occ.synchronize()
model.addPhysicalGroup(volumes[0][0], [volumes[0][1]])
bndry = model.getBoundary([(3, volumes[0][1])], oriented=False)
[model.addPhysicalGroup(b[0], [b[1]]) for b in bndry]
model.addPhysicalGroup(3, [volumes[1][1]])
bndry2 = model.getBoundary([(3, volumes[1][1])], oriented=False)
[model.addPhysicalGroup(b[0], [b[1]]) for b in bndry2]
if not simplex:
gmsh.option.setNumber("Mesh.RecombinationAlgorithm", 2)
gmsh.option.setNumber("Mesh.RecombineAll", 2)
gmsh.option.setNumber("Mesh.SubdivisionAlgorithm", 1)
model.mesh.generate(3)
model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_sphere_plane_mesh(filename: str, order: int = 1):
"""
Create a 3D sphere with center (0,0,0), r=0.3
with a box at [-0.3, 0.6] x [-0.3, 0.6] x [ -0.1, -0.5]
"""
center = [0.0, 0.0, 0.0]
r = 0.3
angle = np.pi / 8
gap = 0.05
H = 0.05
theta = 0 # np.pi / 10
LcMin = 0.05 * r
LcMax = 0.2 * r
gmsh.initialize()
if MPI.COMM_WORLD.rank == 0:
        # Create a sphere composed of two volumes
sphere_bottom = gmsh.model.occ.addSphere(center[0], center[1], center[2], r, angle1=-np.pi / 2, angle2=-angle)
p0 = gmsh.model.occ.addPoint(center[0], center[1], center[2] - r)
sphere_top = gmsh.model.occ.addSphere(center[0], center[1], center[2], r, angle1=-angle, angle2=np.pi / 2)
out_vol_tags, _ = gmsh.model.occ.fragment([(3, sphere_bottom)], [(3, sphere_top)])
# Add bottom box
box = gmsh.model.occ.add_box(center[0] - r, center[1] - r, center[2] - r - gap - H, 3 * r, 3 * r, H)
# Rotate after marking boundaries
gmsh.model.occ.rotate([(3, box)], center[0], center[1], center[2]
- r - 3 * gap, 1, 0, 0, theta)
# Synchronize and create physical tags
gmsh.model.occ.synchronize()
sphere_boundary = gmsh.model.getBoundary(out_vol_tags, oriented=False)
for boundary_tag in sphere_boundary:
gmsh.model.addPhysicalGroup(boundary_tag[0], boundary_tag[1:2])
box_boundary = gmsh.model.getBoundary([(3, box)], oriented=False)
for boundary_tag in box_boundary:
gmsh.model.addPhysicalGroup(boundary_tag[0], boundary_tag[1:2])
p_v = [v_tag[1] for v_tag in out_vol_tags]
gmsh.model.addPhysicalGroup(3, p_v)
gmsh.model.addPhysicalGroup(3, [box])
gmsh.model.occ.synchronize()
gmsh.model.mesh.field.add("Distance", 1)
gmsh.model.mesh.field.setNumbers(1, "NodesList", [p0])
gmsh.model.mesh.field.add("Threshold", 2)
gmsh.model.mesh.field.setNumber(2, "IField", 1)
gmsh.model.mesh.field.setNumber(2, "LcMin", LcMin)
gmsh.model.mesh.field.setNumber(2, "LcMax", LcMax)
gmsh.model.mesh.field.setNumber(2, "DistMin", 0.5 * r)
gmsh.model.mesh.field.setNumber(2, "DistMax", r)
gmsh.model.mesh.field.setAsBackgroundMesh(2)
gmsh.model.mesh.generate(3)
gmsh.model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_sphere_sphere_mesh(filename: str, order: int = 1):
"""
Create a 3D mesh consisting of two spheres with radii 0.3 and 0.6 and
centers (0.5,0.5,0.5) and (0.5,0.5,-0.5)
"""
center = [0.5, 0.5, 0.5]
r = 0.3
angle = np.pi / 8
LcMin = 0.05 * r
LcMax = 0.2 * r
gmsh.initialize()
if MPI.COMM_WORLD.rank == 0:
        # Create sphere 1 composed of two volumes
sphere_bottom = gmsh.model.occ.addSphere(center[0], center[1], center[2], r, angle1=-np.pi / 2, angle2=-angle)
p0 = gmsh.model.occ.addPoint(center[0], center[1], center[2] - r)
sphere_top = gmsh.model.occ.addSphere(center[0], center[1], center[2], r, angle1=-angle, angle2=np.pi / 2)
out_vol_tags, _ = gmsh.model.occ.fragment([(3, sphere_bottom)], [(3, sphere_top)])
        # Create sphere 2 composed of two volumes
sphere_bottom2 = gmsh.model.occ.addSphere(
center[0], center[1], -center[2], 2 * r, angle1=-np.pi / 2, angle2=-angle)
p1 = gmsh.model.occ.addPoint(center[0], center[1], -center[2] - 2 * r)
sphere_top2 = gmsh.model.occ.addSphere(center[0], center[1], -center[2], 2 * r, angle1=-angle, angle2=np.pi / 2)
out_vol_tags2, _ = gmsh.model.occ.fragment([(3, sphere_bottom2)], [(3, sphere_top2)])
# Synchronize and create physical tags
gmsh.model.occ.synchronize()
sphere_boundary = gmsh.model.getBoundary(out_vol_tags, oriented=False)
for boundary_tag in sphere_boundary:
gmsh.model.addPhysicalGroup(boundary_tag[0], boundary_tag[1:2])
sphere_boundary2 = gmsh.model.getBoundary(out_vol_tags2, oriented=False)
for boundary_tag in sphere_boundary2:
gmsh.model.addPhysicalGroup(boundary_tag[0], boundary_tag[1:2])
p_v = [v_tag[1] for v_tag in out_vol_tags]
p_v2 = [v_tag[1] for v_tag in out_vol_tags2]
gmsh.model.addPhysicalGroup(3, p_v)
gmsh.model.addPhysicalGroup(3, p_v2)
gmsh.model.occ.synchronize()
gmsh.model.mesh.field.add("Distance", 1)
gmsh.model.mesh.field.setNumbers(1, "NodesList", [p0, p1])
gmsh.model.mesh.field.add("Threshold", 2)
gmsh.model.mesh.field.setNumber(2, "IField", 1)
gmsh.model.mesh.field.setNumber(2, "LcMin", LcMin)
gmsh.model.mesh.field.setNumber(2, "LcMax", LcMax)
gmsh.model.mesh.field.setNumber(2, "DistMin", 0.5 * r)
gmsh.model.mesh.field.setNumber(2, "DistMax", r)
gmsh.model.mesh.field.setAsBackgroundMesh(2)
gmsh.model.mesh.generate(3)
gmsh.model.mesh.setOrder(order)
# gmsh.option.setNumber("Mesh.SaveAll", 1)
gmsh.write(filename)
MPI.COMM_WORLD.Barrier()
gmsh.finalize()
def create_cylinder_cylinder_mesh(filename: str, order: int = 1, res=0.25, simplex: bool = False):
if MPI.COMM_WORLD.rank == 0:
gmsh.initialize()
model = gmsh.model()
# Generate a mesh with 2nd-order hexahedral cells using gmsh
model_name = "Cylinder-cylinder mesh"
model.add(model_name)
model.setCurrent(model_name)
# Recombine tetrahedrons to hexahedrons
if not simplex:
gmsh.option.setNumber("Mesh.RecombinationAlgorithm", 2)
gmsh.option.setNumber("Mesh.RecombineAll", 2)
# gmsh.option.setNumber("Mesh.CharacteristicLengthFactor", 0.1)
center1 = (0, 0, 0.5)
r1 = 0.8
l1 = 1
Nl1 = int(1 / res)
circle = model.occ.addDisk(*center1, r1, r1)
model.occ.rotate([(2, circle)], 0, 0, 0, 1, 0, 0, np.pi / 2)
model.occ.extrude(
[(2, circle)], 0, l1, 0, numElements=[Nl1], recombine=not simplex)
center2 = (2, 0, -0.5)
r2 = 0.5
l2 = 1
Nl2 = int(1 / res)
circle2 = model.occ.addDisk(*center2, r2, r2)
model.occ.extrude(
[(2, circle2)], 0, 0, l2, numElements=[Nl2], recombine=not simplex)
gmsh.model.mesh.field.add("Box", 1)
gmsh.model.mesh.field.setNumber(1, "VIn", res)
gmsh.model.mesh.field.setNumber(1, "VOut", res)
gmsh.model.mesh.field.setNumber(1, "XMin", center1[0] - l1)
gmsh.model.mesh.field.setNumber(1, "XMax", center1[0] + l1)
gmsh.model.mesh.field.setNumber(1, "YMin", center1[1] - 2 * r1)
gmsh.model.mesh.field.setNumber(1, "YMax", center1[1] + 2 * r1)
gmsh.model.mesh.field.setNumber(1, "ZMin", center1[2] - 2 * r1)
gmsh.model.mesh.field.setNumber(1, "ZMax", center1[2] + 2 * r1)
gmsh.model.mesh.field.setAsBackgroundMesh(1)
model.occ.synchronize()
model.mesh.generate(3)
model.mesh.setOrder(order)
volume_entities = []
for entity in model.getEntities(3):
volume_entities.append(entity[1])
model.addPhysicalGroup(3, volume_entities, tag=1)
model.setPhysicalName(3, 1, "Mesh volume")
# Sort mesh nodes according to their index in gmsh
x = extract_gmsh_geometry(model, model.getCurrent())
ct = "tetrahedron" if simplex else "hexahedron"
# Broadcast cell type data and geometric dimension
gmsh_cell_id = MPI.COMM_WORLD.bcast(
model.mesh.getElementType(ct, order), root=0)
# Get mesh data for dim (0, tdim) for all physical entities
topologies = extract_gmsh_topology_and_markers(model, model.getCurrent())
cells = topologies[gmsh_cell_id]["topology"]
num_nodes = MPI.COMM_WORLD.bcast(cells.shape[1], root=0)
gmsh.finalize()
else:
gmsh_cell_id = MPI.COMM_WORLD.bcast(None, root=0)
num_nodes = MPI.COMM_WORLD.bcast(None, root=0)
cells, x = np.empty([0, num_nodes]), np.empty([0, 3])
# Permute the mesh topology from GMSH ordering to DOLFINx ordering
domain = ufl_mesh_from_gmsh(gmsh_cell_id, 3)
cell_type = CellType.tetrahedron if simplex else CellType.hexahedron
gmsh_hex = cell_perm_gmsh(cell_type, num_nodes)
cells = cells[:, gmsh_hex]
msh = create_mesh(MPI.COMM_WORLD, cells, x, domain)
msh.name = "cylinder_cylinder"
# Permute also entities which are tagged
with XDMFFile(MPI.COMM_WORLD, f"{filename}.xdmf", "w") as file:
file.write_mesh(msh)
```
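Each generator writes a `.msh` file that the demos then convert to XDMF; a minimal call mirroring the demo preamble above (filenames are illustrative):
```python
from dolfinx_contact.meshing import convert_mesh, create_circle_plane_mesh
create_circle_plane_mesh(filename="twomeshes.msh", res=0.05)
convert_mesh("twomeshes", "twomeshes", "triangle", prune_z=True)
convert_mesh("twomeshes", "twomeshes_facets", "line", prune_z=True)
```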
#### File: dolfinx_contact/unbiased/nitsche_unbiased.py
```python
from typing import Callable, Tuple, Union
import dolfinx.common as _common
import dolfinx.fem as _fem
import dolfinx.log as _log
import dolfinx.mesh as _mesh
import dolfinx_cuas
import numpy as np
import ufl
from dolfinx.cpp.graph import AdjacencyList_int32
from dolfinx.cpp.mesh import MeshTags_int32
from petsc4py import PETSc as _PETSc
import dolfinx_contact
import dolfinx_contact.cpp
from dolfinx_contact.helpers import (epsilon, lame_parameters,
rigid_motions_nullspace, sigma_func)
kt = dolfinx_contact.cpp.Kernel
__all__ = ["nitsche_unbiased"]
def nitsche_unbiased(mesh: _mesh.Mesh, mesh_tags: list[MeshTags_int32],
domain_marker: MeshTags_int32,
surfaces: AdjacencyList_int32,
dirichlet: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
neumann: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
contact_pairs: list[Tuple[int, int]],
body_forces: list[Tuple[int, Callable[[np.ndarray], np.ndarray]]],
physical_parameters: dict[str, Union[bool, np.float64, int]],
nitsche_parameters: dict[str, np.float64],
quadrature_degree: int = 5, form_compiler_params: dict = None, jit_params: dict = None,
petsc_options: dict = None, newton_options: dict = None, initial_guess=None,
outfile: str = None, order: int = 1) -> Tuple[_fem.Function, int, int, float]:
"""
Use custom kernel to compute the contact problem with two elastic bodies coming into contact.
Parameters
==========
mesh
The input mesh
mesh_tags
A list of meshtags. The first element must contain the mesh_tags for all puppet surfaces,
Dirichlet-surfaces and Neumann-surfaces
All further elements may contain candidate_surfaces
domain_marker
marker for subdomains where a body force is applied
surfaces
Adjacency list. Links of i are meshtag values for contact surfaces in ith mesh_tag in mesh_tags
dirichlet
List of Dirichlet boundary conditions as pairs of (meshtag value, function), where function
is a function to be interpolated into the dolfinx function space
neumann
Same as dirichlet for Neumann boundary conditions
contact_pairs:
list of pairs (i, j) marking the ith surface as a puppet surface and the jth surface
as the corresponding candidate surface
physical_parameters
Optional dictionary with information about the linear elasticity problem.
Valid (key, value) tuples are: ('E': float), ('nu', float), ('strain', bool)
nitsche_parameters
Optional dictionary with information about the Nitsche configuration.
        Valid (key, value) tuples are: ('gamma', float), ('theta', float) where theta can be -1, 0 or 1 for
skew-symmetric, penalty like or symmetric enforcement of Nitsche conditions
displacement
The displacement enforced on Dirichlet boundary
quadrature_degree
The quadrature degree to use for the custom contact kernels
form_compiler_params
Parameters used in FFCX compilation of this form. Run `ffcx --help` at
        the command line to see all available options. Takes priority over all
other parameter values, except for `scalar_type` which is determined by
DOLFINX.
jit_params
Parameters used in CFFI JIT compilation of C code generated by FFCX.
See https://github.com/FEniCS/dolfinx/blob/main/python/dolfinx/jit.py
for all available parameters. Takes priority over all other parameter values.
petsc_options
        Parameters that are passed to the linear algebra backend
PETSc. For available choices for the 'petsc_options' kwarg,
see the `PETSc-documentation
<https://petsc4py.readthedocs.io/en/stable/manual/ksp/>`
newton_options
Dictionary with Newton-solver options. Valid (key, item) tuples are:
("atol", float), ("rtol", float), ("convergence_criterion", "str"),
("max_it", int), ("error_on_nonconvergence", bool), ("relaxation_parameter", float)
initial_guess
        A function containing an initial guess to use for the Newton solver
outfile
File to append solver summary
order
The order of mesh and function space
"""
form_compiler_params = {} if form_compiler_params is None else form_compiler_params
jit_params = {} if jit_params is None else jit_params
petsc_options = {} if petsc_options is None else petsc_options
newton_options = {} if newton_options is None else newton_options
strain = physical_parameters.get("strain")
if strain is None:
raise RuntimeError("Need to supply if problem is plane strain (True) or plane stress (False)")
else:
plane_strain = bool(strain)
_E = physical_parameters.get("E")
if _E is not None:
E = np.float64(_E)
else:
raise RuntimeError("Need to supply Youngs modulus")
if physical_parameters.get("nu") is None:
raise RuntimeError("Need to supply Poisson's ratio")
else:
nu = physical_parameters.get("nu")
# Compute lame parameters
mu_func, lambda_func = lame_parameters(plane_strain)
mu = mu_func(E, nu)
lmbda = lambda_func(E, nu)
sigma = sigma_func(mu, lmbda)
# Nitche parameters and variables
theta = nitsche_parameters.get("theta")
if theta is None:
raise RuntimeError("Need to supply theta for Nitsche imposition of boundary conditions")
_gamma = nitsche_parameters.get("gamma")
if _gamma is None:
raise RuntimeError("Need to supply Coercivity/Stabilization parameter for Nitsche condition")
else:
gamma: np.float64 = _gamma * E
lifting = nitsche_parameters.get("lift_bc", False)
# Functions space and FEM functions
V = _fem.VectorFunctionSpace(mesh, ("CG", order))
u = _fem.Function(V)
v = ufl.TestFunction(V)
du = ufl.TrialFunction(V)
h = ufl.CellDiameter(mesh)
n = ufl.FacetNormal(mesh)
# Integration measure and ufl part of linear/bilinear form
# metadata = {"quadrature_degree": quadrature_degree}
dx = ufl.Measure("dx", domain=mesh, subdomain_data=domain_marker)
ds = ufl.Measure("ds", domain=mesh, # metadata=metadata,
subdomain_data=mesh_tags[0])
J = ufl.inner(sigma(du), epsilon(v)) * dx
F = ufl.inner(sigma(u), epsilon(v)) * dx
for contact_pair in contact_pairs:
surface_value = int(surfaces.links(0)[contact_pair[0]])
J += - 0.5 * theta * h / gamma * ufl.inner(sigma(du) * n, sigma(v) * n) * \
ds(surface_value)
F += - 0.5 * theta * h / gamma * ufl.inner(sigma(u) * n, sigma(v) * n) * \
ds(surface_value)
    # Dirichlet boundary conditions
bcs = []
if lifting:
tdim = mesh.topology.dim
for bc in dirichlet:
facets = mesh_tags[0].find(bc[0])
cells = _mesh.compute_incident_entities(mesh, facets, tdim - 1, tdim)
u_bc = _fem.Function(V)
u_bc.interpolate(bc[1], cells)
u_bc.x.scatter_forward()
bcs.append(_fem.dirichletbc(u_bc, _fem.locate_dofs_topological(V, tdim - 1, facets)))
else:
for bc in dirichlet:
f = _fem.Function(V)
f.interpolate(bc[1])
F += - ufl.inner(sigma(u) * n, v) * ds(bc[0])\
- theta * ufl.inner(sigma(v) * n, u - f) * \
ds(bc[0]) + gamma / h * ufl.inner(u - f, v) * ds(bc[0])
J += - ufl.inner(sigma(du) * n, v) * ds(bc[0])\
- theta * ufl.inner(sigma(v) * n, du) * \
ds(bc[0]) + gamma / h * ufl.inner(du, v) * ds(bc[0])
# Neumann boundary conditions
for bc in neumann:
g = _fem.Function(V)
g.interpolate(bc[1])
F -= ufl.inner(g, v) * ds(bc[0])
# body forces
for bf in body_forces:
f = _fem.Function(V)
f.interpolate(bf[1])
F -= ufl.inner(f, v) * dx(bf[0])
# Custom assembly
# create contact class
with _common.Timer("~Contact: Init"):
contact = dolfinx_contact.cpp.Contact(mesh_tags, surfaces, contact_pairs,
V._cpp_object, quadrature_degree=quadrature_degree)
with _common.Timer("~Contact: Distance maps"):
for i in range(len(contact_pairs)):
contact.create_distance_map(i)
# pack constants
consts = np.array([gamma, theta])
# Pack material parameters mu and lambda on each contact surface
with _common.Timer("~Contact: Interpolate coeffs (mu, lmbda)"):
V2 = _fem.FunctionSpace(mesh, ("DG", 0))
lmbda2 = _fem.Function(V2)
lmbda2.interpolate(lambda x: np.full((1, x.shape[1]), lmbda))
mu2 = _fem.Function(V2)
mu2.interpolate(lambda x: np.full((1, x.shape[1]), mu))
entities = []
with _common.Timer("~Contact: Compute active entities"):
for pair in contact_pairs:
entities.append(contact.active_entities(pair[0]))
material = []
with _common.Timer("~Contact: Pack coeffs (mu, lmbda"):
for i in range(len(contact_pairs)):
material.append(dolfinx_cuas.pack_coefficients([mu2, lmbda2], entities[i]))
# Pack celldiameter on each surface
h_packed = []
with _common.Timer("~Contact: Compute and pack celldiameter"):
surface_cells = np.unique(np.hstack([entities[i][:, 0] for i in range(len(contact_pairs))]))
h_int = _fem.Function(V2)
expr = _fem.Expression(h, V2.element.interpolation_points)
h_int.interpolate(expr, surface_cells)
for i in range(len(contact_pairs)):
h_packed.append(dolfinx_cuas.pack_coefficients([h_int], entities[i]))
# Pack gap, normals and test functions on each surface
gaps = []
normals = []
test_fns = []
with _common.Timer("~Contact: Pack gap, normals, testfunction"):
for i in range(len(contact_pairs)):
gaps.append(contact.pack_gap(i))
normals.append(contact.pack_ny(i, gaps[i]))
test_fns.append(contact.pack_test_functions(i, gaps[i]))
# Concatenate all coeffs
coeffs_const = []
for i in range(len(contact_pairs)):
coeffs_const.append(np.hstack([material[i], h_packed[i], gaps[i], normals[i], test_fns[i]]))
# Generate Jacobian data structures
J_custom = _fem.form(J, form_compiler_params=form_compiler_params, jit_params=jit_params)
with _common.Timer("~Contact: Generate Jacobian kernel"):
kernel_jac = contact.generate_kernel(kt.Jac)
with _common.Timer("~Contact: Create matrix"):
J = contact.create_matrix(J_custom)
# Generate residual data structures
F_custom = _fem.form(F, form_compiler_params=form_compiler_params, jit_params=jit_params)
with _common.Timer("~Contact: Generate residual kernel"):
kernel_rhs = contact.generate_kernel(kt.Rhs)
with _common.Timer("~Contact: Create vector"):
b = _fem.petsc.create_vector(F_custom)
@_common.timed("~Contact: Update coefficients")
def compute_coefficients(x, coeffs):
u.vector[:] = x.array
u_candidate = []
with _common.Timer("~~Contact: Pack u contact"):
for i in range(len(contact_pairs)):
u_candidate.append(contact.pack_u_contact(i, u._cpp_object, gaps[i]))
u_puppet = []
with _common.Timer("~~Contact: Pack u"):
for i in range(len(contact_pairs)):
u_puppet.append(dolfinx_cuas.pack_coefficients([u], entities[i]))
for i in range(len(contact_pairs)):
c_0 = np.hstack([coeffs_const[i], u_puppet[i], u_candidate[i]])
coeffs[i][:, :] = c_0[:, :]
@_common.timed("~Contact: Assemble residual")
def compute_residual(x, b, coeffs):
b.zeroEntries()
with _common.Timer("~~Contact: Contact contributions (in assemble vector)"):
for i in range(len(contact_pairs)):
contact.assemble_vector(b, i, kernel_rhs, coeffs[i], consts)
with _common.Timer("~~Contact: Standard contributions (in assemble vector)"):
_fem.petsc.assemble_vector(b, F_custom)
# Apply boundary condition
if lifting:
_fem.petsc.apply_lifting(b, [J_custom], bcs=[bcs], x0=[x], scale=-1.0)
b.ghostUpdate(addv=_PETSc.InsertMode.ADD, mode=_PETSc.ScatterMode.REVERSE)
_fem.petsc.set_bc(b, bcs, x, -1.0)
@_common.timed("~Contact: Assemble matrix")
def compute_jacobian_matrix(x, A, coeffs):
A.zeroEntries()
with _common.Timer("~~Contact: Contact contributions (in assemble matrix)"):
for i in range(len(contact_pairs)):
contact.assemble_matrix(A, [], i, kernel_jac, coeffs[i], consts)
with _common.Timer("~~Contact: Standard contributions (in assemble matrix)"):
_fem.petsc.assemble_matrix(A, J_custom, bcs=bcs)
A.assemble()
# coefficient arrays
num_coeffs = contact.coefficients_size()
coeffs = np.array([np.zeros((len(entities[i]), num_coeffs)) for i in range(len(contact_pairs))])
newton_solver = dolfinx_contact.NewtonSolver(mesh.comm, J, b, coeffs)
# Set matrix-vector computations
newton_solver.set_residual(compute_residual)
newton_solver.set_jacobian(compute_jacobian_matrix)
newton_solver.set_coefficients(compute_coefficients)
# Set rigid motion nullspace
null_space = rigid_motions_nullspace(V)
newton_solver.A.setNearNullSpace(null_space)
# Set Newton solver options
newton_solver.set_newton_options(newton_options)
# Set initial guess
if initial_guess is None:
u.x.array[:] = 0
else:
u.x.array[:] = initial_guess.x.array[:]
# Set Krylov solver options
newton_solver.set_krylov_options(petsc_options)
dofs_global = V.dofmap.index_map_bs * V.dofmap.index_map.size_global
_log.set_log_level(_log.LogLevel.OFF)
# Solve non-linear problem
timing_str = f"~Contact: {id(dofs_global)} Solve Nitsche"
with _common.Timer(timing_str):
n, converged = newton_solver.solve(u)
if outfile is not None:
viewer = _PETSc.Viewer().createASCII(outfile, "a")
newton_solver.krylov_solver.view(viewer)
newton_time = _common.timing(timing_str)
if not converged:
raise RuntimeError("Newton solver did not converge")
u.x.scatter_forward()
print(f"{dofs_global}\n Number of Newton iterations: {n:d}\n",
f"Number of Krylov iterations {newton_solver.krylov_iterations}\n", flush=True)
return u, n, newton_solver.krylov_iterations, newton_time[1]
```
#### File: python/tests/test_projection.py
```python
import dolfinx.fem as _fem
import numpy as np
import pytest
from dolfinx.graph import create_adjacencylist
from dolfinx.io import XDMFFile
from dolfinx.mesh import meshtags, locate_entities_boundary
from mpi4py import MPI
import dolfinx_contact
import dolfinx_contact.cpp
from dolfinx_contact.meshing import convert_mesh, create_box_mesh_2D, create_box_mesh_3D
@pytest.mark.parametrize("q_deg", range(1, 4))
@pytest.mark.parametrize("surf", [0, 1])
@pytest.mark.parametrize("dim", [2, 3])
def test_projection(q_deg, surf, dim):
# Create mesh
if dim == 2:
fname = "box_2D"
create_box_mesh_2D(filename=f"{fname}.msh", res=1.0)
convert_mesh(fname, fname, "triangle", prune_z=True)
convert_mesh(f"{fname}", f"{fname}_facets", "line", prune_z=True)
else:
fname = "box_3D"
create_box_mesh_3D(filename=f"{fname}.msh", res=1.0)
convert_mesh(fname, fname, "tetra")
convert_mesh(f"{fname}", f"{fname}_facets", "triangle")
# Read in mesh
with XDMFFile(MPI.COMM_WORLD, f"{fname}.xdmf", "r") as xdmf:
mesh = xdmf.read_mesh(name="Grid")
tdim = mesh.topology.dim
gdim = mesh.geometry.dim
mesh.topology.create_connectivity(tdim - 1, 0)
mesh.topology.create_connectivity(tdim - 1, tdim)
    # Surface parameters, see contact_meshes.py
L = 0.5
delta = 0.1
disp = -0.6
H = 0.5
# Define surfaces
def surface_0(x):
if dim == 2:
return np.logical_and(np.isclose(x[1], delta * (x[0] + delta) / L), x[1] < delta + 1e-5)
else:
return np.isclose(x[2], 0)
    def surface_1(x):
        return np.isclose(x[dim - 1], disp + H)
    # define a restricted range for the x coordinate to ensure the closest
    # point lies on the interior of the opposite surface
    def x_range(x):
        return np.logical_and(x[0] > delta, x[0] < L - delta)
surface_0_val = 1
surface_1_val = 2
# Create meshtags for surfaces
# restrict range of x coordinate for origin surface
if surf == 0:
facets_0 = locate_entities_boundary(mesh, tdim - 1, lambda x: np.logical_and(surface_0(x), x_range(x)))
facets_1 = locate_entities_boundary(mesh, tdim - 1, surface_1)
else:
facets_0 = locate_entities_boundary(mesh, tdim - 1, surface_0)
facets_1 = locate_entities_boundary(mesh, tdim - 1, lambda x: np.logical_and(surface_1(x), x_range(x)))
values_0 = np.full(len(facets_0), surface_0_val, dtype=np.int32)
values_1 = np.full(len(facets_1), surface_1_val, dtype=np.int32)
indices = np.concatenate([facets_0, facets_1])
values = np.hstack([values_0, values_1])
sorted_ind = np.argsort(indices)
facet_marker = meshtags(mesh, tdim - 1, indices[sorted_ind], values[sorted_ind])
# Functions space
V = _fem.VectorFunctionSpace(mesh, ("CG", 1))
# Create contact class, gap function and normals
data = np.array([surface_0_val, surface_1_val], dtype=np.int32)
offsets = np.array([0, 2], dtype=np.int32)
surfaces = create_adjacencylist(data, offsets)
contact = dolfinx_contact.cpp.Contact([facet_marker], surfaces, [(0, 1), (1, 0)],
V._cpp_object, quadrature_degree=q_deg)
contact.create_distance_map(surf)
gap = contact.pack_gap(surf)
normals = contact.pack_ny(surf, gap)
# Compute dot product and normalise
n_dot = np.zeros((gap.shape[0], gap.shape[1] // gdim))
for facet in range(gap.shape[0]):
for q in range(gap.shape[1] // gdim):
g = gap[facet, q * gdim:(q + 1) * gdim]
n = -normals[facet, q * gdim:(q + 1) * gdim]
n_norm = np.linalg.norm(n)
g_norm = np.linalg.norm(g)
for i in range(gdim):
n_dot[facet, q] += g[i] * n[i] / (n_norm * g_norm)
# Test if angle between -normal and gap function is less than 6.5 degrees
# Is better accuracy needed?
assert(np.allclose(n_dot, np.ones(n_dot.shape)))
``` |
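The final check is just the cosine between the gap vector and the flipped candidate normal; in plain numpy terms, with illustrative values:
```python
import numpy as np
g = np.array([0.0, -0.1])  # gap vector pointing towards the candidate surface
n = -np.array([0.0, 1.0])  # flipped outward normal of the candidate facet
cos_angle = g @ n / (np.linalg.norm(g) * np.linalg.norm(n))
assert np.isclose(cos_angle, 1.0)  # aligned vectors => angle of ~0 degrees
```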
{
"source": "jorgensd/ipyvtk-simple",
"score": 3
} |
#### File: ipyvtk-simple/ipyvtk_simple/throttler.py
```python
import asyncio
from time import time
class Timer:
def __init__(self, timeout, callback):
self._timeout = timeout
self._callback = callback
self._task = asyncio.ensure_future(self._job())
async def _job(self):
await asyncio.sleep(self._timeout)
self._callback()
def cancel(self):
self._task.cancel()
def throttle(wait):
""" Decorator that prevents a function from being called
more than once every wait period. """
def decorator(fn):
time_of_last_call = 0
scheduled = False
new_args, new_kwargs = None, None
def throttled(*args, **kwargs):
nonlocal new_args, new_kwargs, time_of_last_call, scheduled
def call_it():
nonlocal new_args, new_kwargs, time_of_last_call, scheduled
time_of_last_call = time()
fn(*new_args, **new_kwargs)
scheduled = False
time_since_last_call = time() - time_of_last_call
new_args = args
new_kwargs = kwargs
if not scheduled:
new_wait = max(0, wait - time_since_last_call)
Timer(new_wait, call_it)
scheduled = True
return throttled
return decorator
``` |
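A usage sketch (hypothetical, not part of the module): the decorator needs a running asyncio event loop, since `Timer` schedules the deferred call with `asyncio.ensure_future`:
```python
import asyncio
from ipyvtk_simple.throttler import throttle
@throttle(0.2)
def render(frame):
    print(f"rendering frame {frame}")
async def main():
    for i in range(10):
        render(i)  # calls landing inside a 0.2 s window are coalesced
        await asyncio.sleep(0.05)
    await asyncio.sleep(0.3)  # let the last scheduled call fire
asyncio.run(main())
```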
{
"source": "jorgensd/meshio",
"score": 3
} |
#### File: meshio/_cli/_info.py
```python
import argparse
import numpy as np
from .._helpers import read, reader_map
from ._helpers import _get_version_text
def info(argv=None):
# Parse command line arguments.
parser = _get_info_parser()
args = parser.parse_args(argv)
# read mesh data
mesh = read(args.infile, file_format=args.input_format)
print(mesh)
# check if the cell arrays are consistent with the points
is_consistent = True
for cells in mesh.cells:
if np.any(cells.data > mesh.points.shape[0]):
print("\nATTENTION: Inconsistent mesh. Cells refer to nonexistent points.")
is_consistent = False
break
# check if there are redundant points
if is_consistent:
point_is_used = np.zeros(mesh.points.shape[0], dtype=bool)
for cells in mesh.cells:
point_is_used[cells.data] = True
if np.any(~point_is_used):
print("ATTENTION: Some points are not part of any cell.")
def _get_info_parser():
parser = argparse.ArgumentParser(
description=("Print mesh info."), formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument("infile", type=str, help="mesh file to be read from")
parser.add_argument(
"--input-format",
"-i",
type=str,
choices=sorted(list(reader_map.keys())),
help="input file format",
default=None,
)
parser.add_argument(
"--version",
"-v",
action="version",
version=_get_version_text(),
help="display version information",
)
return parser
``` |
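The entry point also works programmatically by passing an argv list directly (file names below are hypothetical):
```python
from meshio._cli._info import info
info(["mesh.xdmf"])  # reader inferred from the file extension
info(["mesh.msh", "--input-format", "gmsh"])  # or forced explicitly
```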
{
"source": "jorgensd/pygmsh",
"score": 2
} |
#### File: pygmsh/test/test_recombine.py
```python
import numpy as np
import pygmsh
def test():
geom = pygmsh.built_in.Geometry()
p0 = geom.add_point((0.0, 0.0, 0.0), lcar=1.0)
p1 = geom.add_point((2.0, 0.0, 0.0), lcar=1.0)
p2 = geom.add_point((0.0, 1.0, 0.0), lcar=1.0)
p3 = geom.add_point((2.0, 1.0, 0.0), lcar=1.0)
l0 = geom.add_line(p0, p1)
l1 = geom.add_line(p1, p3)
l2 = geom.add_line(p3, p2)
l3 = geom.add_line(p2, p0)
ll0 = geom.add_line_loop((l0, l1, l2, l3))
rs0 = geom.add_surface(ll0)
geom.set_transfinite_lines([l3, l1], 3, progression=1)
geom.set_transfinite_lines([l2, l0], 3, progression=1)
geom.set_transfinite_surface(rs0)
geom.set_recombined_surfaces([rs0])
mesh = pygmsh.generate_mesh(geom)
assert "quad" in mesh.cells_dict.keys()
ref = np.array([[0, 4, 8, 7], [7, 8, 6, 2], [4, 1, 5, 8], [8, 5, 3, 6]])
assert np.array_equal(ref, mesh.cells_dict["quad"])
return mesh
if __name__ == "__main__":
import meshio
meshio.write("rectangle_structured.vtk", test())
``` |
{
"source": "jorgensoares/ocloud-flask-base",
"score": 2
} |
#### File: web/auth/controllers.py
```python
from flask_mail import Message
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, BadSignature, SignatureExpired
from web import mail
from web.auth.forms import LoginForm, PasswordForgotForm, PasswordResetForm, PasswordChangeForm
from flask_principal import Identity, AnonymousIdentity, identity_changed
from flask import redirect, render_template, flash, url_for, current_app, session
from werkzeug.security import generate_password_hash
from web.functions import get_now
from web.database import db
from flask import Blueprint
from flask_login import login_required, login_user, logout_user
from web.version import __version__ as version
mod_auth = Blueprint('auth', __name__, url_prefix='/auth')
@mod_auth.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
if form.validate_on_submit():
user = form.user
login_user(user, remember=True)
user.last_login = get_now()
user.login_attempts = 0
db.session.commit()
identity_changed.send(current_app._get_current_object(), identity=Identity(user.id))
flash('Welcome {0} {1}'.format(user.first_name, user.last_name), 'success')
return redirect(url_for("index"))
else:
# failed login actions can go here later on
for field, errors in form.errors.items():
for error in errors:
flash(error, 'warning')
return render_template("index.html", form=form)
@mod_auth.route("/logout", methods=["GET"])
@login_required
def logout():
logout_user()
for key in ('identity.name', 'identity.auth_type'):
session.pop(key, None)
identity_changed.send(current_app._get_current_object(), identity=AnonymousIdentity())
return redirect(url_for("index"))
@mod_auth.route("/password-change", methods=['GET', 'POST'])
@login_required
def password_change():
form = PasswordChangeForm()
if form.validate_on_submit():
user = form.user
user.password = generate_password_hash(form.new_password.data)
db.session.commit()
if current_app.config['MAIL']:
            message = '''Hello %s,\n\n This e-mail is to inform you that you have changed your password successfully.
\nIf this request was not made by you please contact support immediately.\n
\nThank you.\n Pimat\n\n''' % user.username
subject = "Pimat Password Change Notice - %s" % user.username
msg = Message(recipients=[user.email], body=message, subject=subject)
mail.send(msg)
flash('Password changed successfully, you should logout and login again!', 'success')
return redirect(url_for("core.dashboard"))
else:
for field, errors in form.errors.items():
for error in errors:
flash(error, 'warning')
return render_template('auth/password_change.html', version=version)
@mod_auth.route("/password-forgot", methods=['GET', 'POST'])
def password_forgot():
form = PasswordForgotForm()
if form.validate_on_submit():
user_details = form.user
s = Serializer(current_app.config['SECRET_KEY'], expires_in=600)
token = s.dumps({'id': user_details.id})
message = '''Hello, \n\n To reset your password go to: http://%s/password_reset \n\n Token: \n %s''' % \
(current_app.config['SERVER_IP'], token)
subject = "Pimat Password Reset - %s" % user_details.username
msg = Message(recipients=[user_details.email], body=message, subject=subject)
mail.send(msg)
        flash('Please check your mailbox!', 'success')
return redirect(url_for("auth.password_reset"))
else:
for field, errors in form.errors.items():
for error in errors:
flash(error, 'warning')
return render_template('auth/password_forgot.html', version=version, form=form)
@mod_auth.route("/password-reset", methods=['GET', 'POST'])
def password_reset():
form = PasswordResetForm()
if form.validate_on_submit():
user = form.user
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(form.token.data)
except SignatureExpired:
flash('Expired Token', 'danger')
return render_template('auth/password_reset_form.html', version=version, form=form)
except BadSignature:
flash('Invalid Token', 'danger')
return render_template('auth/password_reset_form.html', version=version, form=form)
user.password = generate_password_hash(form.new_password.data)
db.session.commit()
        message = '''Hello %s,\n\n This e-mail is to inform you that you have reset your password successfully.
\nIf this request was not made by you please contact support immediately.\n
\nThank you.\n Pimat\n\n''' % user.username
subject = "Pimat Password Reset Notice - %s" % user.username
msg = Message(recipients=[user.email], body=message, subject=subject)
mail.send(msg)
flash('Password updated successfully, Please login.', 'success')
        return redirect(url_for("auth.login"))
else:
for field, errors in form.errors.items():
for error in errors:
flash(error, 'warning')
return render_template('auth/password_reset_form.html', version=version, form=form)
```
#### File: web/auth/ldap_test.py
```python
from flask import Flask, url_for, current_app, session
from flask_ldap3_login import LDAP3LoginManager
from flask_login import LoginManager, login_user, UserMixin, current_user, login_required, logout_user
from flask import render_template_string, redirect
from flask_ldap3_login.forms import LDAPLoginForm
from flask_principal import Principal, identity_loaded, Permission, RoleNeed, UserNeed, identity_changed, Identity, \
AnonymousIdentity
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret'
app.config['DEBUG'] = True
Principal(app)
app.config['ADMIN_GROUP'] = 'CN=Domain Admins,CN=Users,DC=ocloud,DC=cz'
# Setup LDAP Configuration Variables. Change these to your own settings.
# All configuration directives can be found in the documentation.
# Hostname of your LDAP Server
app.config['LDAP_HOST'] = '10.14.10.15'
# Base DN of your directory
app.config['LDAP_BASE_DN'] = 'DC=ocloud,DC=cz'
# Users DN to be prepended to the Base DN
app.config['LDAP_USER_DN'] = 'CN=Users'
# Groups DN to be prepended to the Base DN
app.config['LDAP_GROUP_DN'] = 'CN=Users'
# The RDN attribute for your user schema on LDAP
app.config['LDAP_USER_RDN_ATTR'] = 'cn'
# The Attribute you want users to authenticate to LDAP with.
app.config['LDAP_USER_LOGIN_ATTR'] = 'cn'
# The Username to bind to LDAP with
app.config['LDAP_BIND_USER_DN'] = '<EMAIL>'
# The Password to bind to LDAP with
app.config['LDAP_BIND_USER_PASSWORD'] = '<PASSWORD>'
# SSL settings
app.config['LDAP_PORT'] = 636
app.config['LDAP_USE_SSL'] = True
login_manager = LoginManager(app) # Setup a Flask-Login Manager
ldap_manager = LDAP3LoginManager(app) # Setup a LDAP3 Login Manager.
# Create a dictionary to store the users in when they authenticate
# This example stores users in memory.
users = {}
admin_permission = Permission(RoleNeed(app.config['ADMIN_GROUP']))
# Declare an Object Model for the user, and make it comply with the
# flask-login UserMixin mixin.
class User(UserMixin):
def __init__(self, dn, username, data, memberships):
self.dn = dn
self.username = username
self.data = data
self.memberships = memberships
def __repr__(self):
return self.dn
def get_id(self):
return self.dn
# Declare a User Loader for Flask-Login.
# Simply returns the User if it exists in our 'database', otherwise
# returns None.
@login_manager.user_loader
def load_user(id):
if id in users:
return users[id]
return None
# Declare The User Saver for Flask-Ldap3-Login
# This method is called whenever a LDAPLoginForm() successfully validates.
# Here you have to save the user, and return it so it can be used in the
# login controller.
@ldap_manager.save_user
def save_user(dn, username, data, memberships):
user = User(dn, username, data, memberships)
users[dn] = user
return user
@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
# Set the identity user object
identity.user = current_user
if hasattr(current_user, 'dn'):
identity.provides.add(UserNeed(current_user.dn))
if hasattr(current_user, 'data'):
for role in current_user.data['memberOf']:
identity.provides.add(RoleNeed(role))
# Declare some routes for usage to show the authentication process.
@app.route('/')
def home():
# Redirect users who are not logged in.
if not current_user or current_user.is_anonymous:
return redirect(url_for('login'))
# User is logged in, so show them a page with their cn and dn.
template = """
<h1>Welcome: {{ current_user.data.cn }}</h1>
<h1>Welcome: {{ current_user.data.memberOf }}</h1>
<h2>{{ current_user.dn }}</h2>
<h2>{{ current_user.id }}</h2>
"""
return render_template_string(template)
@app.route('/login', methods=['GET', 'POST'])
def login():
template = """
{{ get_flashed_messages() }}
{{ form.errors }}
<form method="POST">
<label>Username{{ form.username() }}</label>
<label>Password{{ form.password() }}</label>
{{ form.submit() }}
{{ form.hidden_tag() }}
</form>
"""
# Instantiate a LDAPLoginForm which has a validator to check if the user
# exists in LDAP.
form = LDAPLoginForm()
if form.validate_on_submit():
# Successfully logged in, We can now access the saved user object
# via form.user.
login_user(form.user) # Tell flask-login to log them in.
print(current_user.dn)
print(current_user.data['memberOf'])
identity_changed.send(current_app._get_current_object(), identity=Identity(current_user.dn))
return redirect('/') # Send them home
return render_template_string(template, form=form)
@app.route("/admin", methods=['GET', 'POST'])
@admin_permission.require()
@login_required
def admin():
template = """
<h1>Welcome: {{ current_user.data.cn }}</h1>
<h1>Memberships: {{ current_user.data.memberOf }}</h1>
<h2>{{ current_user.dn }}</h2>
<h2>Admin Role: {{ config.ADMIN_GROUP }}</h2>
"""
return render_template_string(template)
@app.route("/logout", methods=["GET"])
def logout():
logout_user()
for key in ('identity.name', 'identity.auth_type'):
session.pop(key, None)
identity_changed.send(current_app._get_current_object(), identity=AnonymousIdentity())
return redirect(url_for("home"))
if __name__ == '__main__':
app.run()
```
#### File: web/core/controllers.py
```python
from flask import render_template, Blueprint
from flask_login import login_required
from web.version import __version__ as version
mod_core = Blueprint('core', __name__, url_prefix='/core')
@mod_core.route("/dashboard")
@login_required
def dashboard():
return render_template("core/dashboard.html", version=version)
``` |
{
"source": "jorgensoares/pimat",
"score": 3
} |
#### File: pimat/pimat_web/api.py
```python
from flask_restful import Resource, reqparse, marshal, fields
from models import db, Sensors, Schedules, RelayLogger, Monitoring
schedules_fields = {
'start_time': fields.String,
'stop_time': fields.String,
'relay': fields.String,
'id': fields.String
}
class SensorsAPI(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('timestamp', type=str, required=True, location='json')
self.reqparse.add_argument('temperature1', type=float, default="", location='json')
self.reqparse.add_argument('temperature2', type=float, default="", location='json')
self.reqparse.add_argument('humidity', type=float, default="", location='json')
self.reqparse.add_argument('light1', type=float, default="", location='json')
self.reqparse.add_argument('pressure', type=float, default="", location='json')
self.reqparse.add_argument('altitude', type=float, default="", location='json')
self.reqparse.add_argument('source', type=str, required=True, location='json')
super(SensorsAPI, self).__init__()
def post(self):
args = self.reqparse.parse_args()
reading = Sensors(args['timestamp'], args['temperature1'], args['temperature2'], args['humidity'],
args['light1'], args['pressure'], args['altitude'], args['source'])
db.session.add(reading)
db.session.commit()
return {'status': 'success', 'data': args}, 201
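# Example request (illustrative; assumes the resource is registered at
# /api/sensors via Flask-RESTful's Api.add_resource):
#   curl -X POST http://localhost/api/sensors \
#        -H "Content-Type: application/json" \
#        -d '{"timestamp": "2017-06-01 12:00:00", "temperature1": 21.5, "source": "pimat1"}'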
class SchedulesAPI(Resource):
def get(self):
schedules = Schedules.query.order_by(Schedules.relay.asc()).all()
return {'schedules': [marshal(schedule, schedules_fields) for schedule in schedules]}, 200
class RelayLoggerAPI(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('timestamp', type=str, required=True, location='json')
self.reqparse.add_argument('relay', type=str, required=True, default="", location='json')
self.reqparse.add_argument('pin', type=int, required=True, default="", location='json')
self.reqparse.add_argument('action', type=str, required=True, default="", location='json')
self.reqparse.add_argument('value', type=str, required=True, default="", location='json')
self.reqparse.add_argument('type', type=str, default="", location='json')
self.reqparse.add_argument('source', type=str, required=True, default="", location='json')
super(RelayLoggerAPI, self).__init__()
def post(self):
args = self.reqparse.parse_args()
action = RelayLogger(args['timestamp'], args['relay'], args['pin'], args['action'], args['value'], args['type'],
args['source'])
db.session.add(action)
db.session.commit()
return {'status': 'success', 'data': args}, 201
class MonitoringAPI(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('timestamp', type=str, required=True, location='json')
self.reqparse.add_argument('hostname', type=str, required=True, default="", location='json')
self.reqparse.add_argument('ip_eth0', type=str, default="", location='json')
self.reqparse.add_argument('ip_wlan0', type=str, default="", location='json')
self.reqparse.add_argument('timezone', type=str, default="", location='json')
self.reqparse.add_argument('boot_time', type=float, default="", location='json')
self.reqparse.add_argument('cpu_temp', type=float, default="", location='json')
self.reqparse.add_argument('cpu_usage', type=float, default="", location='json')
self.reqparse.add_argument('cpu_frequency', type=float, default="", location='json')
self.reqparse.add_argument('load_1', type=float, default="", location='json')
self.reqparse.add_argument('load_5', type=float, default="", location='json')
self.reqparse.add_argument('load_15', type=float, default="", location='json')
self.reqparse.add_argument('total_proc', type=int, default="", location='json')
self.reqparse.add_argument('ram_total', type=str, default="", location='json')
self.reqparse.add_argument('ram_free', type=str, default="", location='json')
self.reqparse.add_argument('ram_used', type=str, default="", location='json')
self.reqparse.add_argument('ram_used_percent', type=float, default="", location='json')
self.reqparse.add_argument('swap_total', type=str, default="", location='json')
self.reqparse.add_argument('swap_free', type=str, default="", location='json')
self.reqparse.add_argument('swap_used', type=str, default="", location='json')
self.reqparse.add_argument('swap_used_percent', type=float, default="", location='json')
self.reqparse.add_argument('disk_total', type=str, default="", location='json')
self.reqparse.add_argument('disk_used', type=str, default="", location='json')
self.reqparse.add_argument('disk_free', type=str, default="", location='json')
self.reqparse.add_argument('disk_used_percent', type=float, default="", location='json')
self.reqparse.add_argument('disk_total_boot', type=str, default="", location='json')
self.reqparse.add_argument('disk_used_boot', type=str, default="", location='json')
self.reqparse.add_argument('disk_free_boot', type=str, default="", location='json')
self.reqparse.add_argument('disk_used_percent_boot', type=float, default="", location='json')
self.reqparse.add_argument('eth0_received', type=str, default="", location='json')
self.reqparse.add_argument('eth0_sent', type=str, default="", location='json')
self.reqparse.add_argument('wlan0_received', type=str, default="", location='json')
self.reqparse.add_argument('wlan0_sent', type=str, default="", location='json')
self.reqparse.add_argument('lo_received', type=str, default="", location='json')
self.reqparse.add_argument('lo_sent', type=str, default="", location='json')
self.reqparse.add_argument('kernel', type=str, default="", location='json')
self.reqparse.add_argument('source', type=str, default="", required=True, location='json')
super(MonitoringAPI, self).__init__()
def post(self):
args = self.reqparse.parse_args()
action = Monitoring(args['timestamp'], args['hostname'], args['ip_eth0'], args['ip_wlan0'], args['timezone'],
args['boot_time'], args['cpu_temp'], args['cpu_usage'], args['cpu_frequency'],
args['load_1'], args['load_5'], args['load_15'], args['total_proc'], args['ram_total'],
args['ram_free'], args['ram_used'], args['ram_used_percent'], args['swap_total'],
args['swap_free'], args['swap_used'], args['swap_used_percent'], args['disk_total'],
args['disk_used'], args['disk_free'], args['disk_used_percent'],args['disk_total_boot'],
args['disk_used_boot'], args['disk_free_boot'], args['disk_used_percent_boot'],
args['eth0_received'], args['eth0_sent'], args['wlan0_received'], args['wlan0_sent'],
args['lo_received'], args['lo_sent'], args['kernel'], args['source'])
db.session.add(action)
db.session.commit()
return {'status': 'success', 'data': args}, 201
``` |
{
"source": "JorgeNustes/gbjl-probability",
"score": 4
} |
#### File: JorgeNustes/gbjl-probability/Generaldistribution.py
```python
class Distribution:
def __init__(self, mu=0, sigma=1):
""" Generic distribution class for calculating and
visualizing a probability distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
        data (list of floats) a list of floats extracted from the data file
"""
self.mean = mu
self.stdev = sigma
self.data = []
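# Illustrative usage (values are arbitrary):
#   d = Distribution(mu=25, sigma=2)
#   d.mean, d.stdev, d.data  # -> 25, 2, []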
``` |
{
"source": "jorgenwh/npstructures",
"score": 3
} |
#### File: npstructures/cpstructures/arrayfunctions.py
```python
from cpstructures.raggedshape import CU_RaggedView
import numpy as np
import cupy as cp
def get_ra_func(name):
return lambda ragged_array, *args, **kwargs: getattr(ragged_array, name)(*args, **kwargs)
REDUCTIONS = {np.add: "sum",
np.logical_and: "all",
np.logical_or: "any",
np.maximum: "max",
np.minimum: "min",
np.multiply: "prod"}
ACCUMULATIONS = {np.multiply: "cumprod",
np.add: "cumsum"}
HANDLED_FUNCTIONS = {getattr(np, name): get_ra_func(name) for name in
list(REDUCTIONS.values()) + list(ACCUMULATIONS.values()) + ["nonzero", "mean", "std", "argmax", "argmin"]}
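# A minimal sketch (not from the original file) of how HANDLED_FUNCTIONS is
# typically consulted from a NEP-18 ``__array_function__`` hook; the class name
# ``RaggedArray`` is assumed here purely for illustration:
#
#     class RaggedArray:
#         def __array_function__(self, func, types, args, kwargs):
#             if func not in HANDLED_FUNCTIONS:
#                 return NotImplemented
#             return HANDLED_FUNCTIONS[func](*args, **kwargs)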
```
#### File: npstructures/npstructures/raggedshape.py
```python
from numbers import Number
import numpy as np
class ViewBase:
def __init__(self, codes, lengths=None):
if lengths is None:
self._codes = codes.view(np.int32)
else:
starts = np.asanyarray(codes, dtype=np.int32)
lengths = np.asanyarray(lengths, dtype=np.int32)
if not lengths.size:
self._codes = np.array([], dtype=np.int32)
else:
self._codes = np.hstack((starts[:, None], lengths[:, None])).flatten()
def __eq__(self, other):
return np.all(self._codes==other._codes)
def __repr__(self):
return f"{self.__class__.__name__}({self.starts}, {self.lengths})"
@property
def lengths(self):
"""The row lengths"""
return self._codes[1::2]
@property
def starts(self):
"""The start index of each row"""
return self._codes[::2]
@property
def ends(self):
"""The end index of each row"""
return self.starts+self.lengths
@property
def n_rows(self):
"""Number of rows"""
if isinstance(self.starts, Number):
return 1
return self.starts.size
def empty_rows_removed(self):
"""Check wheter the `View` with certainty have no empty rows
Returns
-------
bool
Whether or not it is cerain that this view contins no empty rows
"""
return hasattr(self, "empty_removed") and self.empty_removed
def ravel_multi_index(self, indices):
"""Return the flattened indices of a set of array indices
Parameters
----------
indices : tuple
Tuple containing the row- and column indices to ravel
Returns
-------
array
            array containing the flattened indices
"""
return self.starts[indices[0]]+np.asanyarray(indices[1], dtype=np.int32)
def unravel_multi_index(self, flat_indices):
"""Return array indices for a set of flat indices
Parameters
----------
indices : index_like
flat indices to unravel
Returns
-------
tuple
tuple containing the unravelled row- and column indices
"""
starts = self.starts
rows = np.searchsorted(starts, flat_indices, side="right")-1
cols = flat_indices-starts[rows]
return rows, cols
def index_array(self):
"""Return an array of broadcasted row indices"""
diffs = np.zeros(self.size, dtype=np.int32)
diffs[self.starts[1:]] = 1
return np.cumsum(diffs)
class RaggedRow:
def __init__(self, code):
code = np.atleast_1d(code).view(np.int32)
self.starts = code[0]
        self.lengths = code[1]
self.ends = code[0]+code[1]
class RaggedShape(ViewBase):
""" Class that represents the shape of a ragged array.
Represents the same information as a list of row lengths.
Parameters
----------
codes : list or array_like
Either a list of row lengths, or if ``is_coded=True`` an array containing row-starts
and row-lengths as 32-bit numbers.
is_coded : bool, default=False
if `False`, the `codes` are interpreted as row lengths.
Attributes
----------
starts
lengths
ends
"""
def __init__(self, codes, is_coded=False):
if is_coded: # isinstance(codes, np.ndarray) and codes.dtype==np.uint64:
super().__init__(codes)
self._is_coded = True
else:
lengths = np.asanyarray(codes, dtype=np.int32)
starts = np.insert(lengths.cumsum(dtype=np.int32)[:-1], 0, np.int32(0))
super().__init__(starts, lengths)
self._is_coded = True
def __repr__(self):
return f"{self.__class__.__name__}({self.lengths})"
def __str__(self):
return str(self.lengths)
def __getitem__(self, index):
        # accept only slices and scalar indices; anything else is unsupported
        if not isinstance(index, (slice, Number)):
            return NotImplemented
        if isinstance(index, Number):
            index = [index]
new_codes = self._codes.view(np.uint64)[index].copy().view(np.int32)
new_codes[::2] -= new_codes[0]
return self.__class__(new_codes, is_coded=True)
@property
def size(self):
"""The sum of the row lengths"""
if not self.n_rows:
return 0
return self.starts[-1]+self.lengths[-1]
def view(self, indices):
"""Return a view of a subset of rows
Return a view with row information for the row given by `indices`
Parameters
----------
indices : index_like
Used to index the rows
Returns
-------
RaggedView
RaggedView containing information to find the rows specified by `indices`
"""
if isinstance(indices, Number):
return RaggedRow(self._codes.view(np.uint64)[indices])
return RaggedView(self._codes.view(np.uint64)[indices])
def to_dict(self):
"""Return a `dict` of all necessary variables"""
return {"codes": self._codes}
@classmethod
def from_dict(cls, d):
"""Load a `Shape` object from a dict of necessary variables
        Parameters
        ----------
d : dict
`dict` containing all the variables needed to initialize a RaggedShape
Returns
-------
RaggedShape
"""
if "offsets" in d:
return cls(np.diff(d["offsets"]))
else:
return cls(d["codes"])
@classmethod
def asshape(cls, shape):
"""Create a `Shape` from either a list of row lengths or a `Shape`
If `shape` is already a `RaggedShape`, do nothing. Else construct a new
`RaggedShape` object
Parameters
----------
shape : RaggedShape or array_like
Returns
-------
RaggedShape
"""
if isinstance(shape, RaggedShape):
return shape
return cls(shape)
def broadcast_values(self, values, dtype=None):
"""Broadcast the values in a column vector to the data of a ragged array
The resulting array is such that a `RaggedArray` with `self` as shape will
        have the rows filled with the values in `values`. I.e.
        ``RaggedArray(ret, self)[row, j] = values[row, 0]``
Parameters
----------
values : array_like
column vectors with values to be broadcasted
Returns
-------
array
flat array with broadcasted values
"""
values = np.asanyarray(values)
assert values.shape == (self.n_rows, 1), (values.shape, (self.n_rows, 1))
if self.empty_rows_removed():
return self._broadcast_values_fast(values, dtype)
values = values.ravel()
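        # Scatter +value at each row start and -value at each row end, then a
        # running accumulate reproduces each row's value across its extent
        # (logical_xor plays the role of add/subtract for boolean values).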
broadcast_builder = np.zeros(self.size+1, dtype=dtype)
broadcast_builder[self.ends[::-1]] -= values[::-1]
broadcast_builder[0] = 0
broadcast_builder[self.starts] += values
func = np.logical_xor if values.dtype==bool else np.add
return func.accumulate(broadcast_builder[:-1])
def _broadcast_values_fast(self, values, dtype=None):
values = values.ravel()
broadcast_builder = np.zeros(self.size, dtype=dtype)
broadcast_builder[self.starts[1:]] = np.diff(values)
broadcast_builder[0] = values[0]
func = np.logical_xor if values.dtype==bool else np.add
func.accumulate(broadcast_builder, out=broadcast_builder)
return broadcast_builder
class RaggedView(ViewBase):
"""Class to represent a view onto subsets of rows
Same as RaggedShape, except without the constraint that the rows
fill the whole data array. I.e. ``np.all(self.ends[:-1]==self.starts[1:])``
    does not necessarily hold.
Parameters
----------
codes : array_like
Either a list of row starts, or if `lengths` is provided an array containing row-starts
and row-lengths as 32-bit numbers.
lengths : array_like, optional
the lengths of the rows
Attributes
----------
starts
lengths
ends
"""
def __getitem__(self, index):
if isinstance(index, Number):
return RaggedRow(self._codes.view(np.uint64)[index])
return self.__class__(self._codes.view(np.uint64)[index])
def get_shape(self):
""" Return the shape of a ragged array containing the view's rows
Returns
-------
RaggedShape
The shape of a ragged array consisting of the rows in this view
"""
if not self.n_rows:
return RaggedShape(self._codes, is_coded=True)
codes = self._codes.copy()
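        # The in-place cumsum turns the view's row lengths into contiguous row
        # starts, i.e. the packed layout that RaggedShape expects.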
np.cumsum(codes[1:-1:2], out=codes[2::2])
codes[0] = 0
return RaggedShape(codes, is_coded=True)
def get_flat_indices(self):
"""Return the indices into a flattened array
Return the indices of all the elements in all the
rows in this view
Returns
-------
array
"""
if not self.n_rows:
return np.ones(0, dtype=np.int32), self.get_shape()
if self.empty_rows_removed():
return self._get_flat_indices_fast()
shape = self.get_shape()
index_builder = np.ones(shape.size+1, dtype=np.int32)
index_builder[shape.ends[::-1]] = 1-self.ends[::-1]
index_builder[0] = 0
index_builder[shape.starts] += self.starts
np.cumsum(index_builder, out=index_builder)
return index_builder[:-1], shape
def _get_flat_indices_fast(self):
shape = self.get_shape()
index_builder = np.ones(shape.size, dtype=np.int32)
index_builder[shape.starts[1:]] = np.diff(self.starts)-self.lengths[:-1]+1
index_builder[0] = self.starts[0]
np.cumsum(index_builder, out=index_builder)
shape.empty_removed = True
return index_builder, shape
``` |
{
"source": "JorgeOsorio97/CRide-platzi",
"score": 3
} |
#### File: cride/users/permissions.py
```python
from rest_framework.permissions import BasePermission
class IsAccountOwner(BasePermission):
"""Allow access only to account object owned by the requesting user."""
def has_object_permission(self, request, view, obj):
"""Chck object and user are the same"""
return request.user == obj
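# Typical usage (illustrative): combine with DRF's built-in permissions on a
# view, e.g. permission_classes = [IsAuthenticated, IsAccountOwner], so only
# the authenticated owner can act on the object.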
``` |
{
"source": "jorgepadilla19/gdsfactory",
"score": 3
} |
#### File: fixme/fixed/circle.py
```python
import gdsfactory as gf
def option1():
r = 0.1
c1 = gf.components.circle(radius=r, angle_resolution=10, layer=(1, 0))
c1.show()
def option2():
"""not recommended"""
r = 0.1
c2 = gf.components.circle(radius=r, angle_resolution=2, layer=(1, 0))
gdspath1 = c2.write_gds(
precision=1e-9
) # 1nm is the default precision for most Photonics fabs
gf.show(gdspath1)
gdspath2 = c2.write_gds(
precision=10e-12
) # you can also increase to 10pm resolution
gf.show(gdspath2)
if __name__ == "__main__":
option2()
```
#### File: gdsfactory/fixme/test_load_component.py
```python
import jsondiff
from pytest_regressions.data_regression import DataRegressionFixture
import gdsfactory as gf
def test_load_component_gds() -> gf.Component:
gdspath = gf.CONFIG["gdsdir"] / "straight.gds"
c = gf.import_gds(gdspath)
assert c.hash_geometry() == "4b8f6646dcf60b78b905ac0c1665a35f119be32a"
return c
def test_load_component_settings(data_regression: DataRegressionFixture) -> None:
gdspath = gf.CONFIG["gdsdir"] / "straight.gds"
c = gf.import_gds(gdspath)
data_regression.check(c.to_dict())
def test_load_component_with_settings():
"""Ensures we can load it from GDS + YAML and get the same component settings"""
c1 = gf.components.straight(length=1.234)
gdspath = gf.CONFIG["gdsdir"] / "straight.gds"
c2 = gf.import_gds(gdspath)
d1 = c1.to_dict()
d2 = c2.to_dict()
d = jsondiff.diff(d1, d2)
assert len(d) == 0, d
def _write_gds():
c = gf.components.straight(length=1.234)
gdspath = gf.CONFIG["gdsdir"] / "straight.gds"
c.write_gds_with_metadata(gdspath)
if __name__ == "__main__":
# _write_gds()
# test_load_component_gds()
# test_load_component_settings()
test_load_component_with_settings()
# c1 = gf.components.straight()
# gdspath = gf.CONFIG["gdsdir"] / "straight.gds"
# c2 = gf.import_gds(gdspath)
# d = jsondiff.diff(c1.to_dict, c2.to_dict())
# print(d)
```
#### File: gdsfactory/autoplacer/chip_array.py
```python
import itertools
import os
import klayout.db as pya
import gdsfactory.autoplacer.functions as ap
from gdsfactory.autoplacer.auto_placer import AutoPlacer
from gdsfactory.autoplacer.library import Library
class ChipArray(AutoPlacer):
"""An array of chiplets with dicing lanes"""
def __init__(
self,
name: str,
mask_width: float,
mask_height: float,
cols: int,
rows: int,
lib: Library,
spacing: int = 25000,
lane_width: int = 50000,
align: None = None,
) -> None:
super(ChipArray, self).__init__(name, mask_width, mask_height)
self.lib = lib
self.rows = rows
self.cols = cols
self.spacing = spacing
self.lane_width = lane_width
self.align = align
# Infer chip width and height
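        # e.g. (hypothetical numbers) a 25e6-unit-wide mask with 3 columns and
        # 25000-unit spacing gives chip_width = (25e6 - 2 * 25000) / 3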
self.chip_width = (mask_width - (cols - 1) * spacing) / cols
self.chip_height = (mask_height - (rows - 1) * spacing) / rows
self.make_chips()
# self.make_dicing_lanes()
def make_chips(self) -> None:
"""Make all the chips"""
# Get the aligntree
if self.align:
aligntrees = self.lib.get(self.align)
self.aligntree = aligntrees[0]
# Make all the chips
self.chips = []
for row, col in itertools.product(
list(range(self.rows)), list(range(self.cols))
):
name = "{}{}".format(row, col)
chip = AutoPlacer(name, self.chip_width, self.chip_height)
if self.align:
for corner in ap.CORNERS:
chip.pack_auto(self.aligntree, corner)
chip.draw_boundary()
chip.row, chip.col = row, col
self.chips.append(chip)
def make_dicing_lanes(self):
"""Make the dicing lanes"""
container = self.create_cell("DicingLanes")
instance = pya.CellInstArray(container.cell_index(), pya.Trans(0, 0))
self.cell(self.name).insert(instance)
        for dicing_layer in ap.DICING_LAYERS:
            layer = self.layer(dicing_layer[0], dicing_layer[1])
            # reset to the full lane width for each layer; it is halved below for
            # the boundary lanes and would otherwise leak into the next iteration
            lw = self.lane_width / 2
            for row in range(1, self.rows):
y = row * (self.chip_height + self.spacing) - self.spacing / 2
box = pya.Box(0, y - lw, self.max_width, y + lw)
container.shapes(layer).insert(box)
for col in range(1, self.cols):
x = col * (self.chip_width + self.spacing) - self.spacing / 2
for row in range(self.rows):
y1 = row * (self.chip_height + self.spacing)
y2 = (row + 1) * (self.chip_height + self.spacing) - self.spacing
box = pya.Box(x - lw, y1, x + lw, y2)
container.shapes(layer).insert(box)
# on the corners, line has half the width
lw = self.lane_width / 4
for row in [0, self.rows]:
y = row * (self.chip_height + self.spacing) - self.spacing / 2
box = pya.Box(0, y - lw, self.max_width, y + lw)
container.shapes(layer).insert(box)
for col in [0, self.cols]:
x = col * (self.chip_width + self.spacing) - self.spacing / 2
for row in range(self.rows):
y1 = row * (self.chip_height + self.spacing)
y2 = (row + 1) * (self.chip_height + self.spacing) - self.spacing
box = pya.Box(x - lw, y1, x + lw, y2)
container.shapes(layer).insert(box)
def write(self, *args, **kwargs) -> None:
"""Write to disk. We pack the chips at the last minute."""
self.draw_boundary(ap.DEVREC_LAYER)
self.draw_boundary(ap.FLOORPLAN_LAYER)
for chip in self.chips:
x = chip.col * (self.chip_width + self.spacing)
y = chip.row * (self.chip_height + self.spacing)
self.pack_manual(chip.top_cell(), x, y)
super(ChipArray, self).write(*args, **kwargs)
def write_chips(self, name=None, path=None):
if name is None:
name = self.name
if path is None:
path = os.path.join("build", "mask")
filename = os.path.join(path, name)
for chip in self.chips:
chip.write(filename + "_" + chip.name + ".gds")
if __name__ == "__main__":
lib = Library()
mask = ChipArray("chip_array", 25e6, 25e6, 3, 4, lib)
mask.pack_grid(lib.pop("align"))
mask.pack_grid(lib.pop(".*"))
mask.write("build/chip_array.gds")
```
#### File: gdsfactory/gdsfactory/component.py
```python
import datetime
import hashlib
import itertools
import pathlib
import tempfile
import uuid
import warnings
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Tuple, Union
import gdspy
import networkx as nx
import numpy as np
import yaml
from numpy import int64
from omegaconf import DictConfig, OmegaConf
from phidl.device_layout import Device, _parse_layer
from typing_extensions import Literal
from gdsfactory.component_reference import ComponentReference, Coordinate, SizeInfo
from gdsfactory.config import CONF, logger
from gdsfactory.cross_section import CrossSection
from gdsfactory.layers import LAYER_SET, LayerPhidl, LayerSet
from gdsfactory.port import (
Port,
auto_rename_ports,
auto_rename_ports_counter_clockwise,
auto_rename_ports_layer_orientation,
auto_rename_ports_orientation,
map_ports_layer_to_orientation,
map_ports_to_orientation_ccw,
map_ports_to_orientation_cw,
select_ports,
)
from gdsfactory.serialization import clean_dict
from gdsfactory.snap import snap_to_grid
Plotter = Literal["holoviews", "matplotlib", "qt"]
Axis = Literal["x", "y"]
class MutabilityError(ValueError):
pass
PathType = Union[str, Path]
Float2 = Tuple[float, float]
Layer = Tuple[int, int]
Layers = Tuple[Layer, ...]
tmp = pathlib.Path(tempfile.TemporaryDirectory().name) / "gdsfactory"
tmp.mkdir(exist_ok=True, parents=True)
_timestamp2019 = datetime.datetime.fromtimestamp(1572014192.8273)
MAX_NAME_LENGTH = 32
class Component(Device):
"""Extend phidl.Device
Allow name to be set like Component('arc') or Component(name = 'arc')
- get/write YAML metadata
- get ports by type (optical, electrical ...)
- set data_analysis and test_protocols
Args:
name: component_name
Properties:
info: dictionary that includes
- derived properties
- external metadata (test_protocol, docs, ...)
- simulation_settings
- function_name
- name: for the component
settings:
full: full settings passed to the function to create component
changed: changed settings
default: default component settings
child: dict info from the children, if any
"""
def __init__(
self,
name: str = "Unnamed",
version: str = "0.0.1",
changelog: str = "",
**kwargs,
) -> None:
self.__ports__ = {}
self.aliases = {}
self.uid = str(uuid.uuid4())[:8]
if "with_uuid" in kwargs or name == "Unnamed":
name += "_" + self.uid
super(Component, self).__init__(name=name, exclude_from_current=True)
self.name = name # overwrite PHIDL's incremental naming convention
self.info = {}
self.settings = {}
self._locked = False
self.get_child_name = False
self.version = version
self.changelog = changelog
def unlock(self):
"""I recommend doing this only if you know what you are doing."""
self._locked = False
def lock(self):
"""Makes sure components can't add new elements or move existing ones.
Components lock automatically when going into the CACHE to ensure one
component does not change others
"""
self._locked = True
@classmethod
def __get_validators__(cls):
yield cls.validate
@classmethod
def validate(cls, v):
"""pydantic assumes component is valid if:
- name characters < MAX_NAME_LENGTH
- is not empty (has references or polygons)
"""
        MAX_NAME_LENGTH = 100  # local override of the module-level limit (32)
assert isinstance(
v, Component
), f"TypeError, Got {type(v)}, expecting Component"
assert (
len(v.name) <= MAX_NAME_LENGTH
), f"name `{v.name}` {len(v.name)} > {MAX_NAME_LENGTH} "
# assert v.references or v.polygons, f"No references or polygons in {v.name}"
return v
@property
def bbox(self):
"""Return the bounding box of the DeviceReference.
it snaps to 3 decimals in um (0.001um = 1nm precision)
"""
bbox = self.get_bounding_box()
if bbox is None:
bbox = ((0, 0), (0, 0))
return np.round(bbox, 3)
@property
def ports_layer(self) -> Dict[str, str]:
"""Return a mapping from layer0_layer1_E0: portName"""
return map_ports_layer_to_orientation(self.ports)
def port_by_orientation_cw(self, key: str, **kwargs):
"""Return port by indexing them clockwise"""
m = map_ports_to_orientation_cw(self.ports, **kwargs)
if key not in m:
raise KeyError(f"{key} not in {list(m.keys())}")
key2 = m[key]
return self.ports[key2]
def port_by_orientation_ccw(self, key: str, **kwargs):
"""Return port by indexing them clockwise"""
m = map_ports_to_orientation_ccw(self.ports, **kwargs)
if key not in m:
raise KeyError(f"{key} not in {list(m.keys())}")
key2 = m[key]
return self.ports[key2]
def get_ports_xsize(self, **kwargs) -> float:
"""Return xdistance from east to west ports
Keyword Args:
layer: port GDS layer
prefix: with in port name
orientation: in degrees
width:
layers_excluded: List of layers to exclude
port_type: optical, electrical, ...
"""
ports_cw = self.get_ports_list(clockwise=True, **kwargs)
ports_ccw = self.get_ports_list(clockwise=False, **kwargs)
return snap_to_grid(ports_ccw[0].x - ports_cw[0].x)
def get_ports_ysize(self, **kwargs) -> float:
"""Return ydistance from east to west ports
Keyword Args:
layer: port GDS layer
prefix: with in port name
orientation: in degrees
width:
layers_excluded: List of layers to exclude
port_type: optical, electrical, ...
"""
ports_cw = self.get_ports_list(clockwise=True, **kwargs)
ports_ccw = self.get_ports_list(clockwise=False, **kwargs)
return snap_to_grid(ports_ccw[0].y - ports_cw[0].y)
def plot_netlist(
self, with_labels: bool = True, font_weight: str = "normal"
) -> nx.Graph:
"""plots a netlist graph with networkx
https://networkx.github.io/documentation/stable/reference/generated/networkx.drawing.nx_pylab.draw_networkx.html
Args:
with_labels: label nodes
font_weight: normal, bold
"""
netlist = self.get_netlist()
connections = netlist["connections"]
placements = netlist["placements"]
G = nx.Graph()
G.add_edges_from(
[
(",".join(k.split(",")[:-1]), ",".join(v.split(",")[:-1]))
for k, v in connections.items()
]
)
pos = {k: (v["x"], v["y"]) for k, v in placements.items()}
labels = {k: ",".join(k.split(",")[:1]) for k in placements.keys()}
nx.draw(
G,
with_labels=with_labels,
font_weight=font_weight,
labels=labels,
pos=pos,
)
return G
def get_netlist_yaml(self) -> str:
"""Return YAML netlist."""
return OmegaConf.to_yaml(self.get_netlist())
def write_netlist(self, filepath: str) -> None:
"""Write netlist in YAML"""
netlist = self.get_netlist()
OmegaConf.save(netlist, filepath)
def write_netlist_dot(self, filepath: Optional[str] = None) -> None:
"""Write netlist graph in DOT format."""
from networkx.drawing.nx_agraph import write_dot
filepath = filepath or f"{self.name}.dot"
G = self.plot_netlist()
write_dot(G, filepath)
def get_netlist(self) -> Any:
"""Return netlist dict(instances, placements, connections, ports)
instances = {instances}
placements = {instance_name,uid,x,y: dict(x=0, y=0, rotation=90), ...}
connections = {instance_name_src_x_y,portName: instance_name_dst_x_y,portName}
        ports: {portName: instance_name,portName}
"""
from gdsfactory.get_netlist import get_netlist
return get_netlist(component=self)
def assert_ports_on_grid(self, nm: int = 1) -> None:
"""Asserts that all ports are on grid."""
for port in self.ports.values():
port.assert_on_grid(nm=nm)
def get_ports_dict(self, **kwargs) -> Dict[str, Port]:
"""Return a dict of ports.
Keyword Args:
layer: port GDS layer
prefix: for example "E" for east, "W" for west ...
"""
return select_ports(self.ports, **kwargs)
def get_ports_list(self, **kwargs) -> List[Port]:
"""Return list of ports.
Keyword Args:
layer: port GDS layer
            prefix: prefix in the port name
orientation: in degrees
width:
layers_excluded: List of layers to exclude
port_type: optical, electrical, ...
clockwise: if True, sort ports clockwise, False: counter-clockwise
"""
return list(select_ports(self.ports, **kwargs).values())
def ref(
self,
position: Coordinate = (0, 0),
port_id: Optional[str] = None,
rotation: int = 0,
h_mirror: bool = False,
v_mirror: bool = False,
) -> "ComponentReference":
"""Returns Component reference.
Args:
position:
port_id: name of the port
rotation: in degrees
h_mirror: horizontal mirror using y axis (x, 1) (1, 0).
This is the most common mirror.
v_mirror: vertical mirror using x axis (1, y) (0, y)
"""
_ref = ComponentReference(self)
if port_id and port_id not in self.ports:
raise ValueError(f"port {port_id} not in {self.ports.keys()}")
if port_id:
origin = self.ports[port_id].position
else:
origin = (0, 0)
if h_mirror:
_ref.reflect_h(port_id)
if v_mirror:
_ref.reflect_v(port_id)
if rotation != 0:
_ref.rotate(rotation, origin)
_ref.move(origin, position)
return _ref
def ref_center(self, position=(0, 0)):
"""returns a reference of the component centered at (x=0, y=0)"""
si = self.size_info
yc = si.south + si.height / 2
xc = si.west + si.width / 2
center = (xc, yc)
_ref = ComponentReference(self)
_ref.move(center, position)
return _ref
    def __repr__(self) -> str:
        return (
            f"{self.name}: uid {self.uid}, "
            f"ports {list(self.ports.keys())}, "
            f"aliases {list(self.aliases.keys())}, "
            f"{len(self.polygons)} polygons, "
            f"{len(self.references)} references"
        )
def pprint(self) -> None:
"""Prints component info."""
# print(OmegaConf.to_yaml(self.to_dict()))
print(yaml.dump(self.to_dict()))
def pprint_ports(self) -> None:
"""Prints component netlists."""
ports_list = self.get_ports_list()
for port in ports_list:
print(port)
@property
def metadata_child(self) -> DictConfig:
"""Returns metadata from child if any,
Otherwise returns component own metadata
Great to access the children metadata at the bottom
of the hierarchy.
"""
settings = dict(self.settings)
while settings.get("child"):
settings = settings.get("child")
return DictConfig(dict(settings))
@property
def metadata(self) -> DictConfig:
return DictConfig(dict(self.settings))
def add_port(
self,
name: Optional[Union[str, int, object]] = None,
midpoint: Tuple[float, float] = (
0.0,
0.0,
),
width: float = 1.0,
orientation: float = 45,
port: Optional[Port] = None,
layer: Tuple[int, int] = (1, 0),
port_type: str = "optical",
cross_section: Optional[CrossSection] = None,
) -> Port:
"""Add port to component.
You can copy an existing port like add_port(port = existing_port) or
create a new port add_port(myname, mymidpoint, mywidth, myorientation).
You can also copy an existing port
with a new name add_port(port = existing_port, name = new_name)
Args:
name:
midpoint:
orientation: in deg
port: optional port
layer:
port_type: optical, electrical, vertical_dc, vertical_te, vertical_tm
cross_section:
"""
if port:
if not isinstance(port, Port):
raise ValueError(f"add_port() needs a Port, got {type(port)}")
p = port.copy(new_uid=True)
if name is not None:
p.name = name
p.parent = self
elif isinstance(name, Port):
p = name.copy(new_uid=True)
p.parent = self
name = p.name
else:
half_width = width / 2
half_width_correct = snap_to_grid(half_width, nm=1)
if not np.isclose(half_width, half_width_correct):
warnings.warn(
f"port width = {width} will create off-grid points.\n"
f"You can fix it by changing width to {2*half_width_correct}\n"
f"port {name}, {midpoint} {orientation} deg",
stacklevel=3,
)
p = Port(
name=name,
midpoint=(snap_to_grid(midpoint[0]), snap_to_grid(midpoint[1])),
width=snap_to_grid(width),
orientation=orientation,
parent=self,
layer=layer,
port_type=port_type,
cross_section=cross_section,
)
if name is not None:
p.name = name
if p.name in self.ports:
raise ValueError(f"add_port() Port name {p.name!r} exists in {self.name!r}")
self.ports[p.name] = p
return p
def add_ports(self, ports: Union[List[Port], Dict[str, Port]], prefix: str = ""):
"""Add a list or dict of ports,
you can include a prefix to add to the new port names to avoid name conflicts.
Args:
ports: list or dict of ports
prefix: to prepend to each port name
"""
ports = ports if isinstance(ports, list) else ports.values()
for port in list(ports):
name = f"{prefix}{port.name}" if prefix else port.name
self.add_port(name=name, port=port)
def snap_ports_to_grid(self, nm: int = 1) -> None:
for port in self.ports.values():
port.snap_to_grid(nm=nm)
def remove_layers(
self,
layers: Union[List[Tuple[int, int]], Tuple[int, int]] = (),
include_labels: bool = True,
invert_selection: bool = False,
recursive: bool = True,
) -> Device:
"""Remove a list of layers and returns a new Component.
Args:
layers: list of layers to remove.
include_labels: remove labels on those layers.
invert_selection: removes all layers except layers specified.
recursive: operate on the cells included in this cell.
"""
layers = [_parse_layer(layer) for layer in layers]
all_D = list(self.get_dependencies(recursive))
all_D += [self]
for D in all_D:
for polygonset in D.polygons:
polygon_layers = zip(polygonset.layers, polygonset.datatypes)
polygons_to_keep = [(pl in layers) for pl in polygon_layers]
if not invert_selection:
polygons_to_keep = [(not p) for p in polygons_to_keep]
polygonset.polygons = [
p for p, keep in zip(polygonset.polygons, polygons_to_keep) if keep
]
polygonset.layers = [
p for p, keep in zip(polygonset.layers, polygons_to_keep) if keep
]
polygonset.datatypes = [
p for p, keep in zip(polygonset.datatypes, polygons_to_keep) if keep
]
if include_labels:
new_labels = []
for label in D.labels:
original_layer = (label.layer, label.texttype)
original_layer = _parse_layer(original_layer)
if invert_selection:
keep_layer = original_layer in layers
else:
keep_layer = original_layer not in layers
if keep_layer:
new_labels += [label]
D.labels = new_labels
return self
def extract(
self,
layers: Union[List[Tuple[int, int]], Tuple[int, int]] = (),
) -> Device:
"""Extract polygons from a Component and returns a new Component.
Adapted from phidl.geometry.
"""
from gdsfactory.name import clean_value
component = Component(f"{self.name}_{clean_value(layers)}")
if type(layers) not in (list, tuple):
raise ValueError("layers needs to be a list or tuple")
poly_dict = self.get_polygons(by_spec=True)
parsed_layer_list = [_parse_layer(layer) for layer in layers]
for layer, polys in poly_dict.items():
if _parse_layer(layer) in parsed_layer_list:
component.add_polygon(polys, layer=layer)
return component
def copy(
self, prefix: str = "", suffix: str = "_copy", cache: bool = True
) -> Device:
from gdsfactory.copy import copy
return copy(self, prefix=prefix, suffix=suffix, cache=cache)
def copy_child_info(self, component: "Component") -> None:
"""Copy info from child component into parent.
Parent components can access child cells settings.
"""
self.get_child_name = True
self.child = component
@property
def size_info(self) -> SizeInfo:
"""size info of the component"""
return SizeInfo(self.bbox)
def get_setting(self, setting: str) -> Union[str, int, float]:
return (
self.info.get(setting)
or self.settings.full.get(setting)
or self.metadata_child.get(setting)
)
def is_unlocked(self) -> None:
"""Raises error if Component is locked"""
if self._locked:
raise MutabilityError(
f"You cannot modify locked Component {self.name!r}. "
"You need to make a copy of this Component or create a new Component "
"and add a reference to it. "
"Changing a component after creating it can be dangerous "
"as it will affect all of its instances. "
"You can unlock it (at your own risk) by calling `unlock()`"
)
def add(self, element) -> None:
"""Add a new element or list of elements to this Component
Args:
element : `PolygonSet`, `CellReference`, `CellArray` or iterable
The element or iterable of elements to be inserted in this
cell.
Raises:
MutabilityError: if component is locked.
"""
self.is_unlocked()
super().add(element)
def flatten(self, single_layer: Optional[Tuple[int, int]] = None):
"""Returns a flattened copy of the component
Flattens the hierarchy of the Component such that there are no longer
any references to other Components. All polygons and labels from
underlying references are copied and placed in the top-level Component.
If single_layer is specified, all polygons are moved to that layer.
Args:
            single_layer: if specified, all polygons are moved to that layer.
"""
component_flat = self.copy()
component_flat.polygons = []
component_flat.references = []
poly_dict = self.get_polygons(by_spec=True)
for layer, polys in poly_dict.items():
component_flat.add_polygon(polys, layer=single_layer or layer)
component_flat.name = f"{self.name}_flat"
return component_flat
def add_ref(
self, component: Device, alias: Optional[str] = None
) -> "ComponentReference":
"""Add ComponentReference to the current Component."""
if not isinstance(component, Device):
raise TypeError(f"type = {type(Component)} needs to be a Component.")
ref = ComponentReference(component)
self.add(ref)
if alias is not None:
self.aliases[alias] = ref
return ref
def get_layers(self) -> Union[Set[Tuple[int, int]], Set[Tuple[int64, int64]]]:
"""Return a set of (layer, datatype)
.. code ::
import gdsfactory as gf
gf.components.straight().get_layers() == {(1, 0), (111, 0)}
"""
layers = set()
for element in itertools.chain(self.polygons, self.paths):
for layer, datatype in zip(element.layers, element.datatypes):
layers.add((layer, datatype))
for reference in self.references:
for layer, datatype in reference.ref_cell.get_layers():
layers.add((layer, datatype))
for label in self.labels:
layers.add((label.layer, 0))
return layers
def _repr_html_(self):
"""Print component, show geometry in klayout and return plot
for jupyter notebooks
"""
self.show(show_ports=False)
print(self)
return self.plot(plotter="matplotlib")
def plot(self, plotter: Optional[Plotter] = None, **kwargs) -> None:
"""Return component plot.
Args:
plotter: backend ('holoviews', 'matplotlib', 'qt').
        Keyword Args:
layers_excluded: list of layers to exclude.
layer_set: layer_set colors loaded from Klayout.
min_aspect: minimum aspect ratio.
"""
plotter = plotter or CONF.get("plotter", "matplotlib")
if plotter == "matplotlib":
from phidl import quickplot as plot
plot(self)
elif plotter == "holoviews":
try:
import holoviews as hv
hv.extension("bokeh")
except ImportError:
print("you need to `pip install holoviews`")
return self.ploth(**kwargs)
elif plotter == "qt":
from phidl.quickplotter import quickplot2
quickplot2(self)
def ploth(
self,
layers_excluded: Optional[Layers] = None,
layer_set: LayerSet = LAYER_SET,
min_aspect: float = 0.25,
padding: float = 0.5,
):
"""Plot Component in holoviews.
adapted from dphox.device.Device.hvplot
Args:
layers_excluded: list of layers to exclude.
layer_set: layer_set colors loaded from Klayout.
min_aspect: minimum aspect ratio.
padding: around bounding box.
Returns:
Holoviews Overlay to display all polygons.
"""
from gdsfactory.add_pins import get_pin_triangle_polygon_tip
try:
import holoviews as hv
        except ImportError:
            print("you need to `pip install holoviews`")
            return
self._bb_valid = False # recompute the bounding box
b = self.bbox + ((-padding, -padding), (padding, padding))
b = np.array(b.flat)
center = np.array((np.sum(b[::2]) / 2, np.sum(b[1::2]) / 2))
size = np.array((np.abs(b[2] - b[0]), np.abs(b[3] - b[1])))
dx = np.array(
(
np.maximum(min_aspect * size[1], size[0]) / 2,
np.maximum(size[1], min_aspect * size[0]) / 2,
)
)
b = np.hstack((center - dx, center + dx))
plots_to_overlay = []
layers_excluded = [] if layers_excluded is None else layers_excluded
for layer, polygon in self.get_polygons(by_spec=True).items():
if layer in layers_excluded:
continue
try:
layer = layer_set.get_from_tuple(layer)
except ValueError:
layers = list(layer_set._layers.keys())
warnings.warn(f"{layer!r} not defined in {layers}")
layer = LayerPhidl(gds_layer=layer[0], gds_datatype=layer[1])
plots_to_overlay.append(
hv.Polygons(polygon, label=str(layer.name)).opts(
data_aspect=1,
frame_width=500,
fill_alpha=layer.alpha,
ylim=(b[1], b[3]),
xlim=(b[0], b[2]),
color=layer.color,
line_alpha=layer.alpha,
tools=["hover"],
)
)
for name, port in self.ports.items():
name = str(name)
polygon, ptip = get_pin_triangle_polygon_tip(port=port)
plots_to_overlay.append(
hv.Polygons(polygon, label=name).opts(
data_aspect=1,
frame_width=500,
fill_alpha=0,
ylim=(b[1], b[3]),
xlim=(b[0], b[2]),
color="red",
line_alpha=layer.alpha,
tools=["hover"],
)
* hv.Text(ptip[0], ptip[1], name)
)
return hv.Overlay(plots_to_overlay).opts(
show_legend=True, shared_axes=False, ylim=(b[1], b[3]), xlim=(b[0], b[2])
)
def show(
self,
show_ports: bool = True,
show_subports: bool = False,
) -> None:
"""Show component in klayout.
show_subports = True adds pins to a component copy (only used for display)
so the original component remains intact.
Args:
show_ports: shows component with port markers and labels
show_subports: add ports markers and labels to component references
"""
from gdsfactory.add_pins import add_pins_triangle
from gdsfactory.show import show
if show_subports:
component = self.copy(suffix="", cache=False)
for reference in component.references:
add_pins_triangle(component=component, reference=reference)
elif show_ports:
component = self.copy(suffix="", cache=False)
add_pins_triangle(component=component)
else:
component = self
show(component)
def write_gds(
self,
gdspath: Optional[PathType] = None,
gdsdir: PathType = tmp,
unit: float = 1e-6,
precision: float = 1e-9,
timestamp: Optional[datetime.datetime] = _timestamp2019,
logging: bool = True,
on_duplicate_cell: Optional[str] = "warn",
) -> Path:
"""Write component to GDS and returns gdspath
Args:
gdspath: GDS file path to write to.
gdsdir: directory for the GDS file. Defaults to /tmp/
unit: unit size for objects in library. 1um by default.
precision: for object dimensions in the library (m). 1nm by default.
timestamp: Defaults to 2019-10-25 for consistent hash.
If None uses current time.
            logging: if False, disable GDS path logging, for example when showing the file in klayout.
on_duplicate_cell: specify how to resolve duplicate-named cells. Choose one of the following:
"warn" (default): overwrite all duplicate cells with one of the duplicates (arbitrarily)
"error": throw a ValueError when attempting to write a gds with duplicate cells
"overwrite": overwrite all duplicate cells with one of the duplicates, without warning
None: do not try to resolve (at your own risk!)
"""
gdsdir = pathlib.Path(gdsdir)
gdspath = gdspath or gdsdir / (self.name + ".gds")
gdspath = pathlib.Path(gdspath)
gdsdir = gdspath.parent
gdsdir.mkdir(exist_ok=True, parents=True)
cells = self.get_dependencies(recursive=True)
cell_names = [cell.name for cell in list(cells)]
cell_names_unique = set(cell_names)
if len(cell_names) != len(set(cell_names)):
for cell_name in cell_names_unique:
cell_names.remove(cell_name)
if on_duplicate_cell == "error":
cell_names_duplicated = "\n".join(set(cell_names))
raise ValueError(
f"Duplicated cell names in {self.name!r}:\n{cell_names_duplicated}"
)
elif on_duplicate_cell in {"warn", "overwrite"}:
if on_duplicate_cell == "warn":
cell_names_duplicated = "\n".join(set(cell_names))
warnings.warn(
f"Duplicated cell names in {self.name!r}:\n{cell_names_duplicated}"
)
cells_dict = {cell.name: cell for cell in cells}
cells = cells_dict.values()
elif on_duplicate_cell is None:
pass
else:
raise ValueError(
f"on_duplicate_cell: {on_duplicate_cell!r} not in (None, warn, error, overwrite)"
)
all_cells = [self] + list(cells)
no_name_cells = [
cell.name for cell in all_cells if cell.name.startswith("Unnamed")
]
if no_name_cells:
warnings.warn(
f"Component {self.name!r} contains {len(no_name_cells)} Unnamed cells"
)
lib = gdspy.GdsLibrary(unit=unit, precision=precision)
lib.write_gds(gdspath, cells=all_cells, timestamp=timestamp)
self.path = gdspath
if logging:
logger.info(f"Write GDS to {str(gdspath)!r}")
return gdspath
def write_gds_with_metadata(self, *args, **kwargs) -> Path:
"""Write component in GDS and metadata (component settings) in YAML"""
gdspath = self.write_gds(*args, **kwargs)
metadata = gdspath.with_suffix(".yml")
metadata.write_text(self.to_yaml(with_cells=True, with_ports=True))
logger.info(f"Write YAML metadata to {str(metadata)!r}")
return gdspath
def to_dict(
self,
ignore_components_prefix: Optional[List[str]] = None,
ignore_functions_prefix: Optional[List[str]] = None,
with_cells: bool = False,
with_ports: bool = False,
) -> Dict[str, Any]:
"""Return Dict representation of a component.
Args:
ignore_components_prefix: for components to ignore when exporting
ignore_functions_prefix: for functions to ignore when exporting
with_cells: write cells recursively.
with_ports: write port information dict.
"""
d = {}
if with_ports:
ports = {port.name: port.to_dict() for port in self.get_ports_list()}
d["ports"] = ports
if with_cells:
cells = recurse_structures(
self,
ignore_functions_prefix=ignore_functions_prefix,
ignore_components_prefix=ignore_components_prefix,
)
d["cells"] = cells
d["version"] = self.version
d["settings"] = dict(self.settings)
return d
def to_yaml(self, **kwargs) -> str:
"""Write Dict representation of a component in YAML format.
Args:
ignore_components_prefix: for components to ignore when exporting
ignore_functions_prefix: for functions to ignore when exporting
with_cells: write cells recursively
with_ports: write port information
"""
return OmegaConf.to_yaml(self.to_dict(**kwargs))
def to_dict_polygons(self) -> Dict[str, Any]:
"""Returns a dict representation of the flattened component."""
d = {}
polygons = {}
layer_to_polygons = self.get_polygons(by_spec=True)
        for layer, polygons_layer in layer_to_polygons.items():
            layer_name = f"{layer[0]}_{layer[1]}"
            # keep every polygon on the layer instead of overwriting the entry
            polygons[layer_name] = [
                [tuple(snap_to_grid(v)) for v in polygon] for polygon in polygons_layer
            ]
        ports = {port.name: port.settings for port in self.get_ports_list()}
        clean_dict(ports)
        clean_dict(polygons)
        # d is a plain dict, so use item assignment rather than attribute access
        d["info"] = self.info
        d["polygons"] = polygons
        d["ports"] = ports
return d
def auto_rename_ports(self, **kwargs) -> None:
"""Rename ports by orientation NSEW (north, south, east, west).
Keyword Args:
function: to rename ports
select_ports_optical:
select_ports_electrical:
prefix_optical:
prefix_electrical:
.. code::
3 4
|___|_
2 -| |- 5
| |
1 -|______|- 6
| |
8 7
"""
self.is_unlocked()
auto_rename_ports(self, **kwargs)
def auto_rename_ports_counter_clockwise(self, **kwargs) -> None:
self.is_unlocked()
auto_rename_ports_counter_clockwise(self, **kwargs)
def auto_rename_ports_layer_orientation(self, **kwargs) -> None:
self.is_unlocked()
auto_rename_ports_layer_orientation(self, **kwargs)
def auto_rename_ports_orientation(self, **kwargs) -> None:
"""Rename ports by orientation NSEW (north, south, east, west).
Keyword Args:
function: to rename ports
select_ports_optical:
select_ports_electrical:
prefix_optical:
prefix_electrical:
.. code::
N0 N1
|___|_
W1 -| |- E1
| |
W0 -|______|- E0
| |
S0 S1
"""
self.is_unlocked()
auto_rename_ports_orientation(self, **kwargs)
def move(
self,
origin: Float2 = (0, 0),
destination: Optional[Float2] = None,
axis: Optional[Axis] = None,
) -> Device:
"""Return new Component with a moved reference to the original component.
Args:
origin: of component
destination:
axis: x or y
"""
from gdsfactory.functions import move
return move(component=self, origin=origin, destination=destination, axis=axis)
def mirror(
self,
p1: Float2 = (0, 1),
p2: Float2 = (0, 0),
) -> Device:
"""Return new Component with a mirrored reference.
Args:
p1: first point to define mirror axis
p2: second point to define mirror axis
"""
from gdsfactory.functions import mirror
return mirror(component=self, p1=p1, p2=p2)
def rotate(self, angle: float = 90) -> Device:
"""Return a new component with a rotated reference to the original component
Args:
angle: in degrees
"""
from gdsfactory.functions import rotate
return rotate(component=self, angle=angle)
def add_padding(self, **kwargs) -> Device:
"""Return component with padding
Keyword Args:
component
layers: list of layers
suffix for name
default: default padding (50um)
top: north padding
bottom: south padding
right: east padding
left: west padding
"""
from gdsfactory.add_padding import add_padding
return add_padding(component=self, **kwargs)
def test_get_layers() -> Device:
import gdsfactory as gf
c = gf.components.straight(
length=10, width=0.5, layer=(2, 0), layers_cladding=((111, 0),)
)
assert c.get_layers() == {(2, 0), (111, 0)}, c.get_layers()
c.remove_layers((111, 0))
assert c.get_layers() == {(2, 0)}, c.get_layers()
return c
def _filter_polys(polygons, layers_excl):
return [
p
for p, l, d in zip(polygons.polygons, polygons.layers, polygons.datatypes)
if (l, d) not in layers_excl
]
def recurse_structures(
component: Component,
ignore_components_prefix: Optional[List[str]] = None,
ignore_functions_prefix: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""Recurse component and its components recursively.
Args:
component: component to recurse.
        ignore_components_prefix: list of prefixes to ignore
        ignore_functions_prefix: list of prefixes to ignore
"""
ignore_functions_prefix = ignore_functions_prefix or []
ignore_components_prefix = ignore_components_prefix or []
if (
hasattr(component, "function_name")
and component.function_name in ignore_functions_prefix
):
return {}
if hasattr(component, "name") and any(
[component.name.startswith(i) for i in ignore_components_prefix]
):
return {}
output = {component.name: dict(component.settings)}
for reference in component.references:
if (
isinstance(reference, ComponentReference)
and reference.ref_cell.name not in output
):
output.update(recurse_structures(reference.ref_cell))
return output
def test_same_uid() -> None:
import gdsfactory as gf
c = Component()
c << gf.components.rectangle()
c << gf.components.rectangle()
r1 = c.references[0].parent
r2 = c.references[1].parent
assert r1.uid == r2.uid, f"{r1.uid} must equal {r2.uid}"
def test_netlist_simple() -> None:
import gdsfactory as gf
c = gf.Component()
c1 = c << gf.components.straight(length=1, width=1)
c2 = c << gf.components.straight(length=2, width=2)
c2.connect(port="o1", destination=c1.ports["o2"])
c.add_port("o1", port=c1.ports["o1"])
c.add_port("o2", port=c2.ports["o2"])
netlist = c.get_netlist()
# print(netlist.pretty())
assert len(netlist["instances"]) == 2
def test_netlist_complex() -> None:
import gdsfactory as gf
c = gf.components.mzi_arms()
netlist = c.get_netlist()
# print(netlist.pretty())
assert len(netlist["instances"]) == 4, len(netlist["instances"])
def test_extract():
import gdsfactory as gf
c = gf.components.straight(length=10, width=0.5, layers_cladding=[gf.LAYER.WGCLAD])
c2 = c.extract(layers=[gf.LAYER.WGCLAD])
assert len(c.polygons) == 2, len(c.polygons)
assert len(c2.polygons) == 1, len(c2.polygons)
def hash_file(filepath):
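    """Returns the MD5 hex digest of the file contents."""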
md5 = hashlib.md5()
md5.update(filepath.read_bytes())
return md5.hexdigest()
def test_bbox_reference():
import gdsfactory as gf
c = gf.Component("component_with_offgrid_polygons")
c1 = c << gf.components.rectangle(size=(1.5e-3, 1.5e-3), port_type=None)
c2 = c << gf.components.rectangle(size=(1.5e-3, 1.5e-3), port_type=None)
c2.xmin = c1.xmax
assert c2.xsize == 2e-3
return c2
def test_bbox_component():
import gdsfactory as gf
c = gf.components.rectangle(size=(1.5e-3, 1.5e-3), port_type=None)
assert c.xsize == 2e-3
if __name__ == "__main__":
# test_bbox_reference()
# test_bbox_component()
# import holoviews as hv
# from bokeh.plotting import output_file
# import gdsfactory as gf
# hv.extension("bokeh")
# output_file("plot.html")
# c = gf.components.rectangle(size=(4, 2), layer=(0, 0))
# c.show()
# c = gf.components.straight(length=2, info=dict(ng=4.2, wavelength=1.55))
# p = c.ploth()
# show(p)
# c = gf.Component("component_with_offgrid_polygons")
# c1 = c << gf.components.rectangle(size=(1.5e-3, 1.5e-3), port_type=None)
# c2 = c << gf.components.rectangle(size=(1.5e-3, 1.5e-3), port_type=None)
# c2.xmin = c1.xmax
# c.show()
# c = gf.Component("component_with_offgrid_polygons")
# c1 = c << gf.components.rectangle(size=(1.01e-3, 1.01e-3), port_type=None)
# c2 = c << gf.components.rectangle(size=(1.1e-3, 1.1e-3), port_type=None)
# print(c1.xmax)
# c2.xmin = c1.xmax
# c.show()
# c2 = gf.components.mzi()
# c2.show(show_subports=True)
# c2.write_gds_with_metadata("a.gds")
# print(c)
# c = Component()
# print(c.metadata_child.get('name'))
import toolz
import gdsfactory as gf
ring_te = toolz.compose(gf.routing.add_fiber_array, gf.components.ring_single)
rings = gf.grid([ring_te(radius=r) for r in [10, 20, 50]])
@gf.cell
def mask(size=(1000, 1000)):
c = gf.Component()
c << gf.components.die(size=size)
c << rings
return c
m = mask()
gdspath = m.write_gds_with_metadata(gdspath="mask.gds")
```
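The `Component` methods above (`move`, `mirror`, `rotate`, `add_padding`) each return a new component wrapping a transformed reference to the original. A minimal usage sketch, assuming a stock gdsfactory install and using only the calls documented above:
```python
import gdsfactory as gf

c = gf.components.straight(length=10)
c2 = c.rotate(angle=90)            # new component with a rotated reference
c3 = c2.move(destination=(10, 0))  # new component with a moved reference
c4 = c3.add_padding(default=5)     # pads the bounding box by 5 um on all sides
c4.show()
```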
#### File: gdsfactory/components/bbox.py
```python
from typing import Tuple, Union
from numpy import array
import gdsfactory as gf
Coordinate = Union[Tuple[float, float], array]
@gf.cell_without_validator
def bbox(
bbox: Tuple[Coordinate, Coordinate] = ((-1.0, -1.0), (3.0, 4.0)),
layer: Tuple[int, int] = (1, 0),
top: float = 0,
bottom: float = 0,
left: float = 0,
right: float = 0,
) -> gf.Component:
"""Returns bounding box rectangle from coordinates, to allow
creation of a rectangle bounding box directly from another shape.
Args:
bbox: Coordinates of the box [(x1, y1), (x2, y2)].
layer:
top: north offset
bottom: south offset
left: west offset
right: east offset
"""
D = gf.Component()
(xmin, ymin), (xmax, ymax) = bbox
points = [
[xmin - left, ymin - bottom],
[xmax + right, ymin - bottom],
[xmax + right, ymax + top],
[xmin - left, ymax + top],
]
D.add_polygon(points, layer=layer)
return D
if __name__ == "__main__":
import gdsfactory as gf
c = gf.components.L()
c << bbox(bbox=c.bbox, top=10, left=5, right=-2)
# c = bbox()
c.show()
```
#### File: gdsfactory/components/cdsem_all.py
```python
from functools import partial
from typing import Optional, Tuple
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.bend_circular import bend_circular
from gdsfactory.components.cdsem_bend180 import cdsem_bend180
from gdsfactory.components.cdsem_straight import cdsem_straight
from gdsfactory.components.cdsem_straight_density import cdsem_straight_density
from gdsfactory.components.straight import straight as straight_function
from gdsfactory.components.text_rectangular import text_rectangular
from gdsfactory.cross_section import strip
from gdsfactory.types import ComponentFactory, CrossSectionFactory
text_rectangular_mini = partial(text_rectangular, size=1)
@cell
def cdsem_all(
widths: Tuple[float, ...] = (0.4, 0.45, 0.5, 0.6, 0.8, 1.0),
dense_lines_width: Optional[float] = 0.3,
dense_lines_width_difference: float = 20e-3,
dense_lines_gap: float = 0.3,
dense_lines_labels: Tuple[str, ...] = ("DL", "DM", "DH"),
straight: ComponentFactory = straight_function,
bend90: Optional[ComponentFactory] = bend_circular,
cross_section: CrossSectionFactory = strip,
text: ComponentFactory = text_rectangular_mini,
) -> Component:
"""column with all optical PCMs
Args:
widths: for straight lines
"""
c = Component()
_c1 = cdsem_straight(
widths=widths,
cross_section=cross_section,
)
all_devices = [_c1]
if bend90:
all_devices += [
cdsem_bend180(
width=width,
straight=straight,
bend90=bend90,
cross_section=cross_section,
text=text,
)
for width in widths
]
if dense_lines_width:
density_params = [
(
dense_lines_width - dense_lines_width_difference,
dense_lines_gap - dense_lines_width_difference,
dense_lines_labels[0],
),
(dense_lines_width, dense_lines_gap, dense_lines_labels[1]),
(
dense_lines_width + dense_lines_width_difference,
dense_lines_gap + dense_lines_width_difference,
dense_lines_labels[2],
),
]
all_devices += [
cdsem_straight_density(
widths=[w] * 10,
gaps=[g] * 10,
label=lbl,
cross_section=cross_section,
text=text,
)
for w, g, lbl in density_params
]
[c.add_ref(d) for d in all_devices]
c.align(elements="all", alignment="xmin")
c.distribute(elements="all", direction="y", spacing=5, separation=True)
return c
if __name__ == "__main__":
c = cdsem_all()
c.show()
```
#### File: gdsfactory/components/cdsem_bend180.py
```python
from functools import partial
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.bend_circular import bend_circular
from gdsfactory.components.straight import straight as straight_function
from gdsfactory.components.text_rectangular import text_rectangular
from gdsfactory.cross_section import strip
from gdsfactory.types import ComponentFactory, CrossSectionFactory
LINE_LENGTH = 420.0
text_rectangular_mini = partial(text_rectangular, size=1)
@cell
def cdsem_bend180(
width: float = 0.5,
radius: float = 10.0,
wg_length: float = LINE_LENGTH,
straight: ComponentFactory = straight_function,
bend90: ComponentFactory = bend_circular,
cross_section: CrossSectionFactory = strip,
text: ComponentFactory = text_rectangular_mini,
) -> Component:
"""
Args:
width: of the line
cladding_offset:
radius: bend radius
wg_length
"""
c = Component()
r = radius
cross_section = partial(cross_section, width=width)
if wg_length is None:
wg_length = 2 * r
bend90 = bend90(cross_section=cross_section, radius=r)
wg = straight(
cross_section=cross_section,
length=wg_length,
)
# Add the U-turn on straight layer
b1 = c.add_ref(bend90)
b2 = c.add_ref(bend90)
b2.connect("o2", b1.ports["o1"])
wg1 = c.add_ref(wg)
wg1.connect("o1", b1.ports["o2"])
wg2 = c.add_ref(wg)
wg2.connect("o1", b2.ports["o1"])
label = c << text(text=str(int(width * 1e3)))
label.ymax = b2.ymin - 5
label.x = 0
b1.rotate(90)
b2.rotate(90)
wg1.rotate(90)
wg2.rotate(90)
label.rotate(90)
return c
if __name__ == "__main__":
c = cdsem_bend180(width=2)
c.show()
```
#### File: gdsfactory/components/cdsem_straight.py
```python
from functools import partial
from typing import Optional, Tuple
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.straight import straight as straight_function
from gdsfactory.components.text_rectangular import text_rectangular
from gdsfactory.cross_section import strip
from gdsfactory.grid import grid
from gdsfactory.types import ComponentFactory, CrossSectionFactory
text_rectangular_mini = partial(text_rectangular, size=1)
LINE_LENGTH = 420.0
@cell
def cdsem_straight(
widths: Tuple[float, ...] = (0.4, 0.45, 0.5, 0.6, 0.8, 1.0),
length: float = LINE_LENGTH,
cross_section: CrossSectionFactory = strip,
text: Optional[ComponentFactory] = text_rectangular_mini,
spacing: float = 3,
) -> Component:
"""Returns straight waveguide lines width sweep.
Args:
widths: for the sweep
length: for the line
cross_section: for the lines
text: optional text for labels
spacing: edge to edge spacing
"""
lines = []
for width in widths:
cross_section = partial(cross_section, width=width)
line = straight_function(length=length, cross_section=cross_section)
if text:
line = line.copy()
t = line << text(str(int(width * 1e3)))
t.xmin = line.xmax + 5
t.y = 0
lines.append(line)
return grid(lines, spacing=(0, spacing))
if __name__ == "__main__":
c = cdsem_straight()
c.show()
```
#### File: gdsfactory/components/coupler90bend.py
```python
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.bend_euler import bend_euler
from gdsfactory.cross_section import strip
from gdsfactory.types import ComponentFactory, CrossSectionFactory
@gf.cell
def coupler90bend(
radius: float = 10.0,
gap: float = 0.2,
bend: ComponentFactory = bend_euler,
cross_section_inner: CrossSectionFactory = strip,
cross_section_outer: CrossSectionFactory = strip,
) -> Component:
r"""Returns 2 coupled bends.
Args:
radius: um
gap: um
bend: for bend
cross_section_inner:
cross_section_outer:
.. code::
r 3 4
| | |
| / /
| / /
2____/ /
1_____/
"""
c = Component()
xi = cross_section_inner()
xo = cross_section_outer()
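    # center-to-center spacing between the bends = edge-to-edge gap
    # plus half of each waveguide width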
width = xo.info["width"] / 2 + xi.info["width"] / 2
spacing = gap + width
bend90_inner = bend(radius=radius, cross_section=cross_section_inner)
bend90_outer = bend(radius=radius + spacing, cross_section=cross_section_outer)
bend_inner_ref = c << bend90_inner
bend_outer_ref = c << bend90_outer
pbw = bend_inner_ref.ports["o1"]
bend_inner_ref.movey(pbw.midpoint[1] + spacing)
# This component is a leaf cell => using absorb
c.absorb(bend_outer_ref)
c.absorb(bend_inner_ref)
c.add_port("o1", port=bend_outer_ref.ports["o1"])
c.add_port("o2", port=bend_inner_ref.ports["o1"])
c.add_port("o3", port=bend_inner_ref.ports["o2"])
c.add_port("o4", port=bend_outer_ref.ports["o2"])
return c
if __name__ == "__main__":
c = coupler90bend(radius=3)
c.show()
```
#### File: gdsfactory/components/coupler.py
```python
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.coupler_straight import (
coupler_straight as coupler_straight_function,
)
from gdsfactory.components.coupler_symmetric import (
coupler_symmetric as coupler_symmetric_function,
)
from gdsfactory.cross_section import strip
from gdsfactory.snap import assert_on_2nm_grid, snap_to_grid
from gdsfactory.types import ComponentFactory, CrossSectionFactory
@gf.cell
def coupler(
gap: float = 0.236,
length: float = 20.0,
coupler_symmetric: ComponentFactory = coupler_symmetric_function,
coupler_straight: ComponentFactory = coupler_straight_function,
dy: float = 5.0,
dx: float = 10.0,
cross_section: CrossSectionFactory = strip,
**kwargs
) -> Component:
r"""Symmetric coupler.
Args:
gap: between straights
length: of coupling region
coupler_symmetric
coupler_straight
dy: port to port vertical spacing
dx: length of bend in x direction
cross_section: factory
kwargs: cross_section settings
.. code::
dx dx
|------| |------|
o2 ________ ______o3
\ / |
\ length / |
======================= gap | dy
/ \ |
________/ \_______ |
o1 o4
coupler_straight coupler_symmetric
"""
length = snap_to_grid(length)
assert_on_2nm_grid(gap)
c = Component()
sbend = coupler_symmetric(
gap=gap, dy=dy, dx=dx, cross_section=cross_section, **kwargs
)
sr = c << sbend
sl = c << sbend
cs = c << coupler_straight(
length=length, gap=gap, cross_section=cross_section, **kwargs
)
sl.connect("o2", destination=cs.ports["o1"])
sr.connect("o1", destination=cs.ports["o4"])
c.add_port("o1", port=sl.ports["o3"])
c.add_port("o2", port=sl.ports["o4"])
c.add_port("o3", port=sr.ports["o3"])
c.add_port("o4", port=sr.ports["o4"])
c.absorb(sl)
c.absorb(sr)
c.absorb(cs)
c.info["length"] = sbend.info["length"]
c.info["min_bend_radius"] = sbend.info["min_bend_radius"]
c.auto_rename_ports()
return c
if __name__ == "__main__":
# c = gf.Component()
# cp1 = c << coupler(gap=0.2)
# cp2 = c << coupler(gap=0.5)
# cp1.ymin = 0
# cp2.ymin = 0
# layer = (2, 0)
# c = coupler(gap=0.300, layer=layer)
c = coupler(cross_section=gf.cross_section.rib)
c.show(show_subports=True)
```
#### File: gdsfactory/components/die_bbox_frame.py
```python
from typing import Optional, Tuple, Union
import numpy as np
from numpy import array
import gdsfactory as gf
from gdsfactory.components.text import text
from gdsfactory.types import Anchor, Layer
Coordinate = Union[Tuple[float, float], array]
@gf.cell_without_validator
def die_bbox_frame(
bbox: Tuple[Coordinate, Coordinate] = ((-1.0, -1.0), (3.0, 4.0)),
street_width: float = 100.0,
street_length: float = 1000.0,
die_name: Optional[str] = None,
text_size: float = 100.0,
text_anchor: Anchor = "sw",
layer: Layer = (49, 0),
padding: float = 10.0,
) -> gf.Component:
"""Return boundary box frame. Perfect for defining dicing lanes.
the boundary of the chip/die
it can also add a label with the name of the die.
similar to die and bbox
adapted from phidl.geometry
Args:
bbox: bounding box to frame. Component.bbox
street_width: Width of the boundary box
street_length: length of the boundary box
die_name: Label text.
text_size: Label text size.
text_anchor: {'nw', 'nc', 'ne', 'sw', 'sc', 'se'} text location.
layer: Specific layer(s) to put polygon geometry on.
padding: adds padding
"""
D = gf.Component()
(xmin, ymin), (xmax, ymax) = bbox
x = (xmax + xmin) / 2
y = (ymax + ymin) / 2
sx = xmax - xmin
sy = ymax - ymin
sx = sx / 2
sy = sy / 2
sx += street_width + padding
sy += street_width + padding
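    # street_length is recomputed so the frame arms span the larger half-extent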
street_length = max([sx, sy])
xpts = np.array(
[
sx,
sx,
sx - street_width,
sx - street_width,
sx - street_length,
sx - street_length,
]
)
ypts = np.array(
[
sy,
sy - street_length,
sy - street_length,
sy - street_width,
sy - street_width,
sy,
]
)
D.add_polygon([+xpts, +ypts], layer=layer)
D.add_polygon([-xpts, +ypts], layer=layer)
D.add_polygon([+xpts, -ypts], layer=layer)
D.add_polygon([-xpts, -ypts], layer=layer)
if die_name:
t = D.add_ref(text(text=die_name, size=text_size, layer=layer))
d = street_width + 20
if text_anchor == "nw":
t.xmin, t.ymax = [-sx + d, sy - d]
elif text_anchor == "nc":
t.x, t.ymax = [0, sy - d]
elif text_anchor == "ne":
t.xmax, t.ymax = [sx - d, sy - d]
if text_anchor == "sw":
t.xmin, t.ymin = [-sx + d, -sy + d]
elif text_anchor == "sc":
t.x, t.ymin = [0, -sy + d]
elif text_anchor == "se":
t.xmax, t.ymin = [sx - d, -sy + d]
return D.move((x, y)).flatten()
if __name__ == "__main__":
c = gf.Component("demo")
mask = c << gf.components.array(rows=15, columns=10)
c << die_bbox_frame(mask.bbox, die_name="chip99")
c.show()
```
#### File: gdsfactory/components/grating_coupler_elliptical_arbitrary.py
```python
from typing import Optional, Tuple
import numpy as np
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.grating_coupler_elliptical import (
grating_taper_points,
grating_tooth_points,
)
from gdsfactory.cross_section import strip as xs_strip
from gdsfactory.geometry.functions import DEG2RAD
from gdsfactory.tech import LAYER
from gdsfactory.types import CrossSectionOrFactory, Floats, Layer
_gaps = (0.1,) * 10
_widths = (0.5,) * 10
@gf.cell
def grating_coupler_elliptical_arbitrary(
gaps: Floats = _gaps,
widths: Floats = _widths,
wg_width: float = 0.5,
taper_length: float = 16.6,
taper_angle: float = 60.0,
layer: Tuple[int, int] = LAYER.WG,
wavelength: float = 1.554,
fiber_angle: float = 15.0,
neff: float = 2.638, # tooth effective index
nclad: float = 1.443,
layer_slab: Optional[Tuple[int, int]] = LAYER.SLAB150,
slab_xmin: float = -3.0,
polarization: str = "te",
fiber_marker_width: float = 11.0,
fiber_marker_layer: Optional[Layer] = gf.LAYER.TE,
spiked: bool = True,
cross_section: CrossSectionOrFactory = xs_strip,
) -> Component:
r"""Grating coupler with parametrization based on Lumerical FDTD simulation.
    The ellipticity is derived from the Lumerical knowledge base;
    it depends on fiber_angle (degrees), neff, and nclad.
Args:
gaps:
widths:
wg_width: waveguide width
taper_length: taper length from input
taper_angle: grating flare angle
layer: LAYER.WG
wavelength: grating transmission central wavelength (um)
fiber_angle: fibre angle in degrees determines ellipticity
neff: tooth effective index
nclad: cladding effective index
layer_slab: Optional slab
slab_xmin: where 0 is at the start of the taper
polarization: te or tm
fiber_marker_width
fiber_marker_layer
spiked: grating teeth have sharp spikes to avoid non-manhattan drc errors
https://en.wikipedia.org/wiki/Ellipse
c = (a1 ** 2 - b1 ** 2) ** 0.5
e = (1 - (b1 / a1) ** 2) ** 0.5
print(e)
.. code::
fiber
/ / / /
/ / / /
_|-|_|-|_|-|___ layer
layer_slab |
o1 ______________|
"""
# Compute some ellipse parameters
sthc = np.sin(fiber_angle * DEG2RAD)
d = neff ** 2 - nclad ** 2 * sthc ** 2
a1 = wavelength * neff / d
b1 = wavelength / np.sqrt(d)
x1 = wavelength * nclad * sthc / d
a1 = round(a1, 3)
b1 = round(b1, 3)
x1 = round(x1, 3)
period = a1 + x1
c = gf.Component()
c.info["polarization"] = polarization
c.info["wavelength"] = wavelength
gaps = gf.snap.snap_to_grid(gaps)
widths = gf.snap.snap_to_grid(widths)
xi = taper_length
for gap, width in zip(gaps, widths):
xi += gap + width / 2
p = xi / period
pts = grating_tooth_points(
p * a1, p * b1, p * x1, width, taper_angle, spiked=spiked
)
c.add_polygon(pts, layer)
xi += width / 2
# Make the taper
p = taper_length / period
a_taper = p * a1
b_taper = p * b1
x_taper = p * x1
x_output = a_taper + x_taper - taper_length + widths[0] / 2
pts = grating_taper_points(
a_taper, b_taper, x_output, x_taper, taper_angle, wg_width=wg_width
)
c.add_polygon(pts, layer)
x = (taper_length + xi) / 2
name = f"vertical_{polarization.lower()}"
c.add_port(
name=name,
midpoint=[x, 0],
width=fiber_marker_width,
orientation=0,
layer=fiber_marker_layer,
port_type=name,
)
# Add port
xs = (
cross_section(width=wg_width, layer=layer)
if callable(cross_section)
else cross_section
)
c.add_port(
name="o1",
midpoint=[x_output, 0],
width=wg_width,
orientation=180,
layer=layer,
cross_section=xs,
)
if layer_slab:
slab_xmin += taper_length
slab_xsize = xi + 2.0
slab_ysize = c.ysize + 2.0
yslab = slab_ysize / 2
c.add_polygon(
[
(slab_xmin, yslab),
(slab_xsize, yslab),
(slab_xsize, -yslab),
(slab_xmin, -yslab),
],
layer_slab,
)
if fiber_marker_layer:
circle = gf.components.circle(
radius=fiber_marker_width / 2, layer=fiber_marker_layer
)
circle_ref = c.add_ref(circle)
circle_ref.movex(x)
return c
if __name__ == "__main__":
c = grating_coupler_elliptical_arbitrary(fiber_angle=8)
c.show()
```
#### File: gdsfactory/components/grating_coupler_rectangular_arbitrary.py
```python
from typing import Optional, Tuple
import numpy as np
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.rectangle import rectangle
from gdsfactory.components.taper import taper as taper_function
from gdsfactory.tech import LAYER
from gdsfactory.types import ComponentFactory, Floats, Layer
_gaps = (0.2,) * 10
_widths = (0.5,) * 10
@gf.cell
def grating_coupler_rectangular_arbitrary(
gaps: Floats = _gaps,
widths: Floats = _widths,
wg_width: float = 0.5,
width_grating: float = 11.0,
length_taper: float = 150.0,
layer: Tuple[int, int] = gf.LAYER.WG,
polarization: str = "te",
wavelength: float = 1.55,
taper: Optional[ComponentFactory] = taper_function,
layer_grating: Optional[Layer] = None,
layer_slab: Optional[Tuple[int, int]] = LAYER.SLAB150,
slab_xmin: float = -1.0,
slab_offset: float = 1.0,
) -> Component:
r"""Grating coupler uniform (grating with rectangular shape not elliptical).
Therefore it needs a longer taper.
Grating teeth are straight instead of elliptical.
Args:
gaps: list of gaps
widths: list of widths
wg_width: input waveguide width
width_grating:
length_taper:
layer: for grating teeth
polarization: 'te' or 'tm'
wavelength: in um
taper: function
layer_grating:
layer_slab: layer that protects the slab under the grating
slab_xmin: where 0 is at the start of the taper
slab_offset: from edge of grating to edge of the slab
.. code::
fiber
/ / / /
/ / / /
_|-|_|-|_|-|___ layer
layer_slab |
o1 ______________|
top view _________
/| | | | |
/ | | | | |
/taper_angle
/_ _| | | | |
wg_width | | | | | |
\ | | | | |
\ | | | | |
\ | | | | |
\|_|_|_|_|
<-->
taper_length
"""
c = Component()
if taper:
taper_ref = c << taper(
length=length_taper,
width2=width_grating,
width1=wg_width,
layer=layer,
)
c.add_port(port=taper_ref.ports["o1"], name="o1")
xi = taper_ref.xmax
else:
length_taper = 0
xi = 0
widths = gf.snap.snap_to_grid(widths)
gaps = gf.snap.snap_to_grid(gaps)
for width, gap in zip(widths, gaps):
xi += gap + width / 2
cgrating = c.add_ref(
rectangle(
size=[width, width_grating],
layer=layer,
port_type=None,
centered=True,
)
)
cgrating.x = gf.snap.snap_to_grid(xi)
cgrating.y = 0
xi += width / 2
if layer_slab:
slab_xmin += length_taper
slab_xsize = xi + slab_offset
slab_ysize = c.ysize + 2 * slab_offset
yslab = slab_ysize / 2
c.add_polygon(
[
(slab_xmin, yslab),
(slab_xsize, yslab),
(slab_xsize, -yslab),
(slab_xmin, -yslab),
],
layer_slab,
)
xport = np.round((xi + length_taper) / 2, 3)
port_type = f"vertical_{polarization.lower()}"
c.add_port(name=port_type, port_type=port_type, midpoint=(xport, 0), orientation=0)
c.info["polarization"] = polarization
c.info["wavelength"] = wavelength
gf.asserts.grating_coupler(c)
return c
if __name__ == "__main__":
c = grating_coupler_rectangular_arbitrary()
print(c.ports)
c.show()
```
#### File: gdsfactory/components/litho_ruler.py
```python
from typing import Tuple
import gdsfactory as gf
@gf.cell
def litho_ruler(
height: float = 2,
width: float = 0.5,
spacing: float = 2.0,
    scale: Tuple[float, ...] = (3, 1, 1, 1, 1, 2, 1, 1, 1, 1),
num_marks: int = 21,
layer: Tuple[int, int] = (1, 0),
) -> gf.Component:
"""Creates a ruler structure for lithographic measurement with marks of
varying scales to allow for easy reading by eye.
adapted from phidl.geometry
Args:
        height: Height of the ruling marks.
        width: Width of the ruling marks.
        spacing: Center-to-center spacing of the ruling marks.
        scale: Height scale pattern of marks.
        num_marks: Total number of marks to generate.
layer: Specific layer to put the ruler geometry on.
"""
D = gf.Component("litho_ruler")
for n in range(num_marks):
h = height * scale[n % len(scale)]
D << gf.components.rectangle(size=(width, h), layer=layer)
D.distribute(direction="x", spacing=spacing, separation=False, edge="x")
D.align(alignment="ymin")
D.flatten()
return D
if __name__ == "__main__":
c = litho_ruler()
c.show()
```
#### File: gdsfactory/components/pad.py
```python
from functools import partial
from typing import Optional, Tuple
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.compass import compass
from gdsfactory.tech import LAYER
from gdsfactory.types import ComponentOrFactory, Layer
@cell
def pad(
size: Tuple[float, float] = (100.0, 100.0),
layer: Layer = LAYER.M3,
layers_cladding: Optional[Tuple[Layer, ...]] = None,
cladding_offsets: Optional[Tuple[float, ...]] = None,
port_inclusion: float = 0,
) -> Component:
"""Rectangular pad with 4 ports (1, 2, 3, 4)
Args:
size:
layer: pad layer
layers_cladding:
cladding_offsets:
port_inclusion: from edge
"""
c = Component()
rect = compass(size=size, layer=layer, port_inclusion=port_inclusion)
c_ref = c.add_ref(rect)
c.add_ports(c_ref.ports)
c.info["size"] = (float(size[0]), float(size[1]))
c.info["layer"] = layer
if layers_cladding and cladding_offsets:
for layer, cladding_offset in zip(layers_cladding, cladding_offsets):
c.add_ref(
compass(
size=(size[0] + 2 * cladding_offset, size[1] + 2 * cladding_offset),
layer=layer,
)
)
c.add_port(name="pad", port_type="vertical_dc", layer=layer, orientation=0)
return c
@cell
def pad_array(
pad: ComponentOrFactory = pad,
spacing: Tuple[float, float] = (150.0, 150.0),
columns: int = 6,
rows: int = 1,
orientation: float = 270,
) -> Component:
"""Returns 2D array of pads
Args:
pad: pad element
spacing: x, y pitch
columns:
rows:
orientation: port orientation in deg
"""
c = Component()
pad = pad() if callable(pad) else pad
size = pad.settings.full["size"]
c.info["size"] = size
c.add_array(pad, columns=columns, rows=rows, spacing=spacing)
width = size[0] if orientation in [90, 270] else size[1]
for col in range(columns):
for row in range(rows):
c.add_port(
name=f"e{row+1}{col+1}",
midpoint=(col * spacing[0], row * spacing[1]),
width=width,
orientation=orientation,
port_type="electrical",
layer=pad.info["layer"],
)
return c
pad_array90 = partial(pad_array, orientation=90)
pad_array270 = partial(pad_array, orientation=270)
pad_array0 = partial(pad_array, orientation=0, columns=1, rows=3)
pad_array180 = partial(pad_array, orientation=180, columns=1, rows=3)
if __name__ == "__main__":
# c = pad()
# c = pad(layer_to_inclusion={(3, 0): 10})
# print(c.ports)
# c = pad(width=10, height=10)
# print(c.ports.keys())
# c = pad_array90()
c = pad_array0()
# c = pad_array270()
# c.pprint_ports()
# c = pad_array_2d(cols=2, rows=3, port_names=("e2",))
# c = pad_array(columns=2, rows=2, orientation=270)
# c.auto_rename_ports()
c.show()
```
#### File: gdsfactory/components/ramp.py
```python
from typing import Optional
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.types import Layer
@gf.cell
def ramp(
length: float = 10.0,
width1: float = 5.0,
width2: Optional[float] = 8.0,
layer: Layer = (1, 0),
) -> Component:
"""Return a ramp component. Based on phidl.
Args:
length: Length of the ramp section.
width1: Width of the start of the ramp section.
width2: Width of the end of the ramp section (defaults to width1).
layer: Specific layer to put polygon geometry on.
"""
if width2 is None:
width2 = width1
xpts = [0, length, length, 0]
ypts = [width1, width2, 0, 0]
c = Component()
c.add_polygon([xpts, ypts], layer=layer)
c.add_port(name="o1", midpoint=[0, width1 / 2], width=width1, orientation=180)
c.add_port(name="o2", midpoint=[length, width2 / 2], width=width2, orientation=0)
return c
if __name__ == "__main__":
c = ramp()
c.show()
```
#### File: gdsfactory/components/waveguide_template.py
```python
import picwriter.components as pc
from picwriter.components.waveguide import WaveguideTemplate
from gdsfactory.types import Layer
def strip(
wg_width: float = 0.5,
layer: Layer = (1, 0),
layer_cladding: Layer = (111, 0),
radius: float = 10.0,
cladding_offset: float = 3.0,
euler_bend: bool = True,
wg_type: str = "strip",
) -> WaveguideTemplate:
"""
wg_type: strip, slot, and swg (subwavelength)
resist: Specifies the type of photoresist used (+ or -)
"""
return pc.WaveguideTemplate(
bend_radius=radius,
wg_width=wg_width,
wg_layer=layer[0],
wg_datatype=layer[1],
clad_layer=layer_cladding[0],
clad_datatype=layer_cladding[1],
clad_width=cladding_offset,
wg_type=wg_type,
euler_bend=euler_bend,
)
if __name__ == "__main__":
c = strip()
```
#### File: gdsfactory/gdsfactory/coord2.py
```python
from typing import Tuple, Union
import numpy as np
class Coord2:
def __init__(self, x: float, y: float) -> None:
self.point = np.array([x, y])
def __getitem__(self, i: int) -> float:
return self.point[i]
@property
def x(self) -> float:
return self.point[0]
@property
def y(self) -> float:
return self.point[1]
@property
def xy(self) -> Tuple[float, float]:
return self.x, self.y
def __add__(self, c2: Union[Tuple[float, float], "Coord2"]) -> "Coord2":
return Coord2(self[0] + c2[0], self[1] + c2[1])
def __mul__(self, a: float) -> "Coord2":
return Coord2(self[0] * a, self[1] * a)
def __rmul__(self, a: float) -> "Coord2":
return Coord2(self[0] * a, self[1] * a)
def __str__(self) -> str:
return f"Coord2({self[0]}, {self[1]})"
def __repr__(self) -> str:
return f"Coord2({self[0]}, {self[1]})"
if __name__ == "__main__":
p0 = Coord2(1.0, 1.5)
p1 = Coord2(2.0, 0.0)
p2 = p0 + p1
p3 = p0 * 2
p4 = 2 * p0
p5 = p3 + (0.0, 5.0)
print(p2)
print(p5)
print(p3.x, p3.y)
```
#### File: gdsfactory/export/to_3d.py
```python
from typing import Optional, Tuple
import matplotlib.colors
import shapely
from gdsfactory.component import Component
from gdsfactory.layers import LayerSet
from gdsfactory.tech import LAYER_STACK, LayerStack
from gdsfactory.types import Layer
def to_3d(
component: Component,
layer_set: LayerSet,
layer_stack: LayerStack = LAYER_STACK,
exclude_layers: Optional[Tuple[Layer, ...]] = None,
):
"""Return Component 3D trimesh Scene.
Args:
component:
layer_set: layer colors from Klayout Layer Properties file
layer_stack: contains thickness and zmin for each layer
exclude_layers: layers to exclude
"""
try:
from trimesh.creation import extrude_polygon
from trimesh.scene import Scene
    except ImportError as e:
        raise ImportError("to_3d requires trimesh. Run `pip install trimesh`") from e
scene = Scene()
layer_to_thickness = layer_stack.get_layer_to_thickness()
layer_to_zmin = layer_stack.get_layer_to_zmin()
exclude_layers = exclude_layers or []
for layer, polygons in component.get_polygons(by_spec=True).items():
if (
layer not in exclude_layers
and layer in layer_to_thickness
and layer in layer_to_zmin
):
height = layer_to_thickness[layer]
zmin = layer_to_zmin[layer]
color_hex = layer_set.get_from_tuple(layer).color
color_rgb = matplotlib.colors.to_rgb(color_hex)
for polygon in polygons:
p = shapely.geometry.Polygon(polygon)
mesh = extrude_polygon(p, height=height)
mesh.apply_translation((0, 0, zmin))
mesh.visual.face_colors = (*color_rgb, 0.5)
scene.add_geometry(mesh)
return scene
if __name__ == "__main__":
import gdsfactory as gf
c = gf.components.taper_strip_to_ridge()
s = to_3d(c, layer_set=gf.layers.LAYER_SET)
s.show()
```
#### File: gdsfactory/geometry/boolean.py
```python
from typing import Tuple, Union
import phidl.geometry as pg
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.types import ComponentOrReference, Int2, Layer
@gf.cell
def boolean(
A: Union[ComponentOrReference, Tuple[ComponentOrReference, ...]],
B: Union[ComponentOrReference, Tuple[ComponentOrReference, ...]],
operation: str,
precision: float = 1e-4,
num_divisions: Union[int, Int2] = (1, 1),
max_points: int = 4000,
layer: Layer = (1, 0),
) -> Component:
"""Performs boolean operations between 2 Component/Reference objects,
or lists of Devices/DeviceReferences.
``operation`` should be one of {'not', 'and', 'or', 'xor', 'A-B', 'B-A', 'A+B'}.
Note that 'A+B' is equivalent to 'or', 'A-B' is equivalent to 'not', and
'B-A' is equivalent to 'not' with the operands switched
gdsfactory wrapper for phidl.geometry.boolean
You can also use gdsfactory.drc.boolean that uses Klayout backend
Args:
A: Component(/Reference) or list of Component(/References)
B: Component(/Reference) or list of Component(/References)
operation: {'not', 'and', 'or', 'xor', 'A-B', 'B-A', 'A+B'}
precision: float Desired precision for rounding vertex coordinates.
num_divisions: number of divisions with which the geometry is divided into
multiple rectangular regions. This allows for each region to be
processed sequentially, which is more computationally efficient.
max_points: The maximum number of vertices within the resulting polygon.
layer: Specific layer to put polygon geometry on.
Returns: Component with polygon(s) of the boolean operations between
the 2 input Devices performed.
"""
A = list(A) if isinstance(A, tuple) else A
B = list(B) if isinstance(B, tuple) else B
c = pg.boolean(
A=A,
B=B,
operation=operation,
precision=precision,
num_divisions=num_divisions,
max_points=max_points,
layer=layer,
)
return gf.read.from_phidl(component=c)
def test_boolean():
c = gf.Component()
e1 = c << gf.components.ellipse()
e2 = c << gf.components.ellipse(radii=(10, 6))
e3 = c << gf.components.ellipse(radii=(10, 4))
e3.movex(5)
e2.movex(2)
c = boolean(A=[e1, e3], B=e2, operation="A-B")
assert len(c.polygons) == 2, len(c.polygons)
if __name__ == "__main__":
c = gf.Component()
e1 = c << gf.components.ellipse()
e2 = c << gf.components.ellipse(radii=(10, 6))
e3 = c << gf.components.ellipse(radii=(10, 4))
e3.movex(5)
e2.movex(2)
c = boolean(A=[e1, e3], B=e2, operation="A-B")
c.show()
```
#### File: gdsfactory/geometry/compute_area.py
```python
import gdspy as gp
from numpy import float64
from gdsfactory.component import Component
from gdsfactory.geometry.functions import area
from gdsfactory.types import Dict, Layer, Tuple
def bucket_cells_by_rank(cells):
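    """Buckets cells by hierarchy rank: rank 0 cells have no dependencies,
    and each higher rank only depends on cells of lower ranks."""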
cells = list(cells)
rank = 0
rank_to_cells = {}
all_classified_cells = set()
prev_len_cells = -1
while cells:
classified_cells = set()
to_rm = []
for i, c in enumerate(cells):
_cells = c.get_dependencies(recursive=False)
unclassified_subcells = _cells - all_classified_cells
if len(unclassified_subcells) == 0:
classified_cells.update([c])
to_rm += [i]
if prev_len_cells == len(cells):
# print(cells)
raise ValueError("Error: some cells cannot be linked")
prev_len_cells = len(cells)
while to_rm:
cells.pop(to_rm.pop())
rank_to_cells[rank] = classified_cells
all_classified_cells.update(classified_cells)
rank += 1
return rank_to_cells
def get_polygons_on_layer(c, layer):
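    """Returns the list of polygons of c on the given (layer, datatype)."""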
polygons = []
for polyset in c.polygons:
for ii in range(len(polyset.polygons)):
key = (polyset.layers[ii], polyset.datatypes[ii])
if key == layer:
polygons.append(polyset.polygons[ii])
return polygons
def compute_area(component: Component, layer: Layer) -> float64:
"""Returns Computed area of the component for a given layer."""
c = component.copy()
c.flatten()
polys_by_spec = c.get_polygons(by_spec=True)
_area = 0
for layer_polygons, polys in polys_by_spec.items():
# print(layer)
if layer_polygons == layer:
joined_polys = gp.boolean(polys, None, operation="or")
# print(joined_polys)
try:
_area += sum([abs(area(p)) for p in joined_polys.polygons])
except BaseException:
print(f"Warning, {c.name} joinedpoly {joined_polys} could not be added")
return _area
def compute_area_hierarchical(
component: Component,
layer: Layer,
func_check_to_flatten=None,
keep_zero_area_cells: bool = False,
) -> Dict[str, Tuple[float, int]]:
"""Compute area of the component on a given layer
Faster than `compute_area` but need to be careful if the cells overlap
Can pass a list of cells to flatten
Returns Dict[key of each cell, Tuple[area, rank (position in hierarchy)]
Args:
component:
layer:
func_check_to_flatten:
        keep_zero_area_cells: if False, removes zero-area cells from the result
"""
all_cells = component.get_dependencies(recursive=True)
all_cells.update([component])
cells_by_rank = bucket_cells_by_rank(all_cells)
# print("Found the hierarchy...")
cell_to_area = {}
cell_to_rank = {}
if func_check_to_flatten is None:
def _has_polygons(cell):
polys = get_polygons_on_layer(cell, layer)
return len(polys)
func_check_to_flatten = _has_polygons
for rank, cells in cells_by_rank.items():
for cell in cells:
to_flatten = func_check_to_flatten(cell)
if to_flatten:
# print("CAH - TO FLATTEN", to_flatten)
_area = compute_area(cell, layer)
else:
# _cell_area_by_spec = cell.area(by_spec=True)
_area = 0
# _cell_area_by_spec[layer] if layer in _cell_area_by_spec else 0
# _area = 0
# print("CAH - ",cell.name)
for ref in cell.references:
_area += cell_to_area[ref.ref_cell.name]
# print(
# "CAH {} {:.1f} {}".format(cell.name, _area, len(cell.references))
# )
cell_to_area[cell.name] = _area
cell_to_rank[cell.name] = rank
to_rm = []
if not keep_zero_area_cells:
for k, v in cell_to_area.items():
if v == 0:
to_rm += [k]
while to_rm:
cell_to_area.pop(to_rm.pop())
cell_to_data = {}
for k, v in cell_to_area.items():
cell_to_data[k] = (v, cell_to_rank[k])
return cell_to_data
def test_compute_area():
import gdsfactory as gf
c = gf.components.mzi()
assert int(compute_area(c, layer=(1, 0))) == 148, int(compute_area(c, layer=(1, 0)))
def test_compute_area_hierarchical():
import gdsfactory as gf
c = gf.components.mzi()
assert int(compute_area_hierarchical(c, layer=(1, 0))[c.name][0]) == 148, int(
compute_area_hierarchical(c, layer=(1, 0))[c.name][0]
)
if __name__ == "__main__":
test_compute_area_hierarchical()
# test_compute_area()
# import gdsfactory as gf
# print(bucket_cells_by_rank([c] + list(c.get_dependencies(recursive=True))))
# c = gf.components.mzi()
# print(compute_area(c, layer=(1, 0)))
# d = compute_area_hierarchical(c, layer=(1, 0))
# c.show()
# test_compute_area_hierarchical()
# test_compute_area()
```
#### File: gdsfactory/geometry/write_drc.py
```python
import pathlib
from dataclasses import asdict, is_dataclass
from typing import List, Optional
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal
from gdsfactory.config import logger
from gdsfactory.install import get_klayout_path
from gdsfactory.types import Dict, Layer, PathType
layer_name_to_min_width: Dict[str, float]
RuleType = Literal[
"width",
"space",
"enclosing",
]
def rule_width(value: float, layer: str, angle_limit: float = 90) -> str:
"""Min feature size"""
category = "width"
error = f"{layer} {category} {value}um"
return (
f"{layer}.{category}({value}, angle_limit({angle_limit}))"
f".output('{error}', '{error}')"
)
def rule_space(value: float, layer: str, angle_limit: float = 90) -> str:
"""Min Space between shapes of layer"""
category = "space"
error = f"{layer} {category} {value}um"
return (
f"{layer}.{category}({value}, angle_limit({angle_limit}))"
f".output('{error}', '{error}')"
)
def rule_separation(value: float, layer1: str, layer2: str):
"""Min space between different layers"""
error = f"min {layer1} {layer2} separation {value}um"
return f"{layer1}.separation({layer2}, {value})" f".output('{error}', '{error}')"
def rule_enclosing(
value: float, layer1: str, layer2: str, angle_limit: float = 90
) -> str:
"""Layer1 must be enclosed by layer2 by value.
checks if layer1 encloses (is bigger than) layer2 by value
"""
error = f"{layer1} enclosing {layer2} by {value}um"
return (
f"{layer1}.enclosing({layer2}, angle_limit({angle_limit}), {value})"
f".output('{error}', '{error}')"
)
def write_layer_definition(layer_map: Dict[str, Layer]) -> List[str]:
    """Returns layer_map definition script lines for klayout.
Args:
layer_map: can be dict or dataclass
"""
layer_map = asdict(layer_map) if is_dataclass(layer_map) else layer_map
return [
f"{key} = input({value[0]}, {value[1]})" for key, value in layer_map.items()
]
def write_drc_deck(rules: List[str], layer_map: Dict[str, Layer]) -> str:
"""Returns drc_rule_deck for klayou
Args:
rules: list of rules
layer_map: layer definitions can be dict or dataclass
"""
script = []
script += write_layer_definition(layer_map=layer_map)
script += ["\n"]
script += rules
return "\n".join(script)
def write_drc_deck_macro(
name="generic",
filepath: Optional[PathType] = None,
shortcut: str = "Ctrl+Shift+D",
**kwargs,
) -> str:
"""Write script for klayout rule deck
Args:
name: drc rule deck name
filepath: Optional macro path (defaults to .klayout/drc/name.lydrc)
Keyword Args:
rules: list of rules
layer_map: layer definitions can be dict or dataclass
"""
script = f"""<?xml version="1.0" encoding="utf-8"?>
<klayout-macro>
<description>{name} DRC</description>
<version/>
<category>drc</category>
<prolog/>
<epilog/>
<doc/>
<autorun>false</autorun>
<autorun-early>false</autorun-early>
<shortcut>{shortcut}</shortcut>
<show-in-menu>true</show-in-menu>
<group-name>drc_scripts</group-name>
<menu-path>tools_menu.drc.end</menu-path>
<interpreter>dsl</interpreter>
<dsl-interpreter-name>drc-dsl-xml</dsl-interpreter-name>
<text># {name} DRC
# Read about DRC scripts in the User Manual under "Design Rule Check (DRC)"
# Based on SOEN pdk https://github.com/usnistgov/SOEN-PDK/tree/master/tech/OLMAC
# http://klayout.de/doc/manual/drc_basic.html
report("generic DRC")
tiles(100)
tile_borders(2)
threads(3)
"""
script += write_drc_deck(**kwargs)
script += """
</text>
</klayout-macro>
"""
filepath = filepath or get_klayout_path() / "drc" / f"{name}.lydrc"
filepath = pathlib.Path(filepath)
filepath.write_text(script)
logger.info(f"Wrote DRC deck to {filepath}")
return script
if __name__ == "__main__":
import gdsfactory as gf
rules = [
rule_width(layer="WG", value=0.2),
rule_space(layer="WG", value=0.2),
rule_width(layer="M1", value=1),
rule_width(layer="M2", value=2),
rule_space(layer="M2", value=2),
rule_separation(layer1="HEATER", layer2="M1", value=1.0),
rule_enclosing(layer1="M1", layer2="VIAC", value=0.2),
]
drc_rule_deck = write_drc_deck_macro(rules=rules, layer_map=gf.LAYER)
print(drc_rule_deck)
```
#### File: gdsfactory/gdsfactory/hash_points.py
```python
import hashlib
from typing import Tuple
Floats = Tuple[float, ...]
def format_float(x: float) -> str:
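    """Formats a float with up to 3 decimals, stripping trailing zeros."""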
return "{:.3f}".format(x).rstrip("0").rstrip(".")
def _fmt_cp(cps: Floats) -> str:
return "_".join([f"({format_float(p[0])},{format_float(p[1])})" for p in cps])
def hash_points(points: Floats) -> str:
return hashlib.md5(_fmt_cp(points).encode()).hexdigest()
```
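A quick usage sketch for `hash_points` (the import path is assumed from the file location above). Coordinates are formatted to 3 decimals before hashing, so points that differ only beyond the third decimal produce the same digest:
```python
from gdsfactory.hash_points import hash_points  # assumed import path

print(hash_points([(0.0, 0.0), (1.5, 2.25)]))
# differences beyond 3 decimals are rounded away before hashing
assert hash_points([(0.0, 0.0)]) == hash_points([(0.0000001, 0.0)])
```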
#### File: gdsfactory/mask/merge_metadata.py
```python
from pathlib import Path
from typing import Tuple
from omegaconf import DictConfig, OmegaConf
import gdsfactory as gf
from gdsfactory.mask.merge_markdown import merge_markdown
from gdsfactory.mask.merge_test_metadata import merge_test_metadata
# from gdsfactory.mask.merge_json import merge_json
from gdsfactory.mask.merge_yaml import merge_yaml
from gdsfactory.mask.write_labels import write_labels
def merge_metadata(
gdspath: Path,
labels_prefix: str = "opt",
layer_label: Tuple[int, int] = gf.LAYER.TEXT,
) -> DictConfig:
"""Merges all mask metadata and returns test metadata
This function works well only when you define the mask in YAML
Exports YAML file with only components with a measurement label
This is the automatic version of write_labels combined with merge_test_metadata
.. code::
CSV labels -------
|--> merge_test_metadata dict
|
YAML metatada ----
Args:
gdspath: GDSpath
labels_prefix
layer_label: layer for the labels
"""
mdpath = gdspath.with_suffix(".md")
yaml_path = gdspath.with_suffix(".yml")
test_metadata_path = gdspath.with_suffix(".tp.yml")
build_directory = gdspath.parent.parent
doe_directory = build_directory / "cache_doe"
labels_path = write_labels(
gdspath=gdspath, prefix=labels_prefix, layer_label=layer_label
)
mask_metadata = merge_yaml(doe_directory=doe_directory, yaml_path=yaml_path)
merge_markdown(reports_directory=doe_directory, mdpath=mdpath)
tm = merge_test_metadata(
labels_prefix=labels_prefix,
mask_metadata=mask_metadata,
labels_path=labels_path,
)
test_metadata_path.write_text(OmegaConf.to_yaml(tm))
return tm
if __name__ == "__main__":
gdspath = (
gf.CONFIG["samples_path"] / "mask_custom" / "build" / "mask" / "sample_mask.gds"
)
tm = merge_metadata(gdspath)
```
#### File: gdsfactory/mask/merge_yaml.py
```python
from typing import Any, Dict, Optional
from omegaconf import OmegaConf
from gdsfactory.config import logger
from gdsfactory.types import PathType
def merge_yaml(
doe_directory: PathType,
yaml_path: Optional[PathType] = None,
json_version: int = 6,
) -> Dict[str, Any]:
"""Combine several YAML files
in the root of the mask directory, gets mask_name from there
Args:
doe_directory: defaults to current working directory
extra_directories: list of extra_directories
yaml_path: optional metadata path to write metadata
json_version:
"""
logger.debug(f"Merging JSON files from {doe_directory}")
cells = {}
for filename in doe_directory.glob("**/*.yml"):
logger.debug(f"merging {filename}")
metadata = OmegaConf.load(filename)
metadata = OmegaConf.to_container(metadata)
cells.update(metadata.get("cells", {}))
metadata = dict(
json_version=json_version,
cells=cells,
)
if yaml_path:
yaml_path.write_text(OmegaConf.to_yaml(metadata))
logger.info(f"Wrote metadata in {yaml_path}")
return metadata
if __name__ == "__main__":
from pprint import pprint
import gdsfactory as gf
gdspath = (
gf.CONFIG["samples_path"] / "mask_custom" / "build" / "mask" / "sample_mask.gds"
)
build_directory = gdspath.parent.parent
doe_directory = build_directory / "cache_doe"
yaml_path = gdspath.with_suffix(".yml")
d = merge_yaml(doe_directory=doe_directory, yaml_path=yaml_path)
pprint(d)
```
#### File: gdsfactory/models/heater.py
```python
dn_dt_si = 1.87e-4
dn_dt_sio2 = 8.5e-6
def delta_temperature_pi(
length: float, wavelength: float = 1.55, dndT: float = 1.8e-4
) -> float:
return wavelength / (2.0 * length * dndT)
if __name__ == "__main__":
for length in [320, 600]:
dT = delta_temperature_pi(length=length)
print(f"length = {length}, dT = {dT:.3f} K")
```
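The expression in `delta_temperature_pi` follows from the standard thermo-optic phase shift of a waveguide of length L; setting the phase shift to pi gives the formula used above:
```
\Delta\phi = \frac{2\pi}{\lambda} \, L \, \frac{dn}{dT} \, \Delta T
\qquad\Rightarrow\qquad
\Delta T_\pi = \frac{\lambda}{2 \, L \, (dn/dT)}
```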
#### File: gdsfactory/read/from_dphox.py
```python
import gdsfactory as gf
def from_dphox(device: "dp.Device", foundry: "dp.foundry.Foundry") -> gf.Component:
"""Converts a Dphox Device into a gdsfactory Component.
Note that you need to install dphox `pip install dphox`
https://dphox.readthedocs.io/en/latest/index.html
Args:
device:
foundry:
"""
c = gf.Component(device.name)
for layer_name, shapely_multipolygon in device.layer_to_polys.items():
for poly in shapely_multipolygon:
layer = foundry.layer_to_gds_label[layer_name]
c.add_polygon(points=poly, layer=layer)
for port_name, port in device.port.items():
c.add_port(
name=port_name,
midpoint=(port.x, port.y),
orientation=port.a,
width=port.w,
)
return c
if __name__ == "__main__":
import dphox as dp
from dphox.demo import lateral_nems_ps
nems_ps = lateral_nems_ps(waveguide_w=0.3)
c = from_dphox(nems_ps, foundry=dp.foundry.FABLESS)
c.show()
```
#### File: gdsfactory/read/from_gdspaths.py
```python
import pathlib
from typing import Tuple
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.read.import_gds import import_gds
from gdsfactory.types import ComponentOrPath, PathType
@cell
def from_gdspaths(cells: Tuple[ComponentOrPath, ...]) -> Component:
"""Combine all GDS files or gf.components into a gf.component.
Args:
cells: List of gdspaths or Components
"""
component = Component()
for c in cells:
if not isinstance(c, Component):
c = import_gds(c)
component << c
return component
def from_gdsdir(dirpath: PathType) -> Component:
"""Merges GDS cells from a directory into a single Component"""
dirpath = pathlib.Path(dirpath)
return from_gdspaths(dirpath.glob("*.gds"))
if __name__ == "__main__":
from gdsfactory.config import diff_path
# c = gdspaths([gf.components.straight(), gf.components.bend_circular()])
# leave these two lines to end up tests showing the diff
c = from_gdspaths(diff_path.glob("*.gds"))
c.show()
```
#### File: gdsfactory/read/import_gds.py
```python
from functools import lru_cache
from pathlib import Path
from typing import Callable, Optional, Union, cast
import gdspy
import numpy as np
from omegaconf import OmegaConf
from phidl.device_layout import CellArray, DeviceReference
from gdsfactory.cell import CACHE
from gdsfactory.component import Component
from gdsfactory.config import CONFIG, logger
from gdsfactory.name import get_name_short
from gdsfactory.snap import snap_to_grid
@lru_cache(maxsize=None)
def import_gds(
gdspath: Union[str, Path],
cellname: Optional[str] = None,
flatten: bool = False,
snap_to_grid_nm: Optional[int] = None,
name: Optional[str] = None,
decorator: Optional[Callable] = None,
gdsdir: Optional[Union[str, Path]] = None,
safe_cell_names: bool = False,
**kwargs,
) -> Component:
"""Returns a Componenent from a GDS file.
Adapted from phidl/geometry.py
if any cell names are found on the component CACHE we append a $ with a
number to the name
Args:
gdspath: path of GDS file.
cellname: cell of the name to import (None) imports top cell.
flatten: if True returns flattened (no hierarchy)
snap_to_grid_nm: snap to different nm grid (does not snap if False)
name: Optional name. Over-rides the default imported name.
decorator: function to apply over the imported gds.
gdsdir: optional GDS directory.
safe_cell_names: append file hash to imported cell names to avoid
duplicated cell names.
kwargs: extra info for the imported component (polarization, wavelength ...).
"""
gdspath = Path(gdsdir) / Path(gdspath) if gdsdir else Path(gdspath)
gdshash = gdspy.gdsii_hash(gdspath)
if not gdspath.exists():
raise FileNotFoundError(f"No file {gdspath!r} found")
metadata_filepath = gdspath.with_suffix(".yml")
gdsii_lib = gdspy.GdsLibrary()
gdsii_lib.read_gds(str(gdspath))
top_level_cells = gdsii_lib.top_level()
cellnames = [c.name for c in top_level_cells]
if cellname is not None:
if cellname not in gdsii_lib.cells:
raise ValueError(
f"cell {cellname} is not in file {gdspath} with cells {cellnames}"
)
topcell = gdsii_lib.cells[cellname]
elif cellname is None and len(top_level_cells) == 1:
topcell = top_level_cells[0]
elif cellname is None and len(top_level_cells) > 1:
raise ValueError(
f"import_gds() There are multiple top-level cells in {gdspath!r}, "
f"you must specify `cellname` to select of one of them among {cellnames}"
)
if name:
if name in CACHE:
raise ValueError(
f"name = {name!r} already on cache. "
"Please, choose a different name or set name = None. "
)
else:
topcell.name = name
if flatten:
component = Component(name=name or cellname or cellnames[0])
polygons = topcell.get_polygons(by_spec=True)
for layer_in_gds, polys in polygons.items():
component.add_polygon(polys, layer=layer_in_gds)
component.name = (
get_name_short(f"{component.name}_{gdshash}")
if safe_cell_names
else get_name_short(component.name)
)
else:
D_list = []
cell_to_device = {}
for c in gdsii_lib.cells.values():
D = Component(name=c.name)
D.polygons = c.polygons
D.references = c.references
D.name = c.name
for label in c.labels:
rotation = label.rotation
if rotation is None:
rotation = 0
label_ref = D.add_label(
text=label.text,
position=np.asfarray(label.position),
magnification=label.magnification,
rotation=rotation * 180 / np.pi,
layer=(label.layer, label.texttype),
)
label_ref.anchor = label.anchor
D.name = (
get_name_short(f"{D.name}_{gdshash}")
if safe_cell_names
else get_name_short(D.name)
)
D.unlock()
cell_to_device.update({c: D})
D_list += [D]
for D in D_list:
# First convert each reference so it points to the right Device
converted_references = []
for e in D.references:
ref_device = cell_to_device[e.ref_cell]
if isinstance(e, gdspy.CellReference):
dr = DeviceReference(
device=ref_device,
origin=e.origin,
rotation=e.rotation,
magnification=e.magnification,
x_reflection=e.x_reflection,
)
dr.owner = D
converted_references.append(dr)
elif isinstance(e, gdspy.CellArray):
dr = CellArray(
device=ref_device,
columns=e.columns,
rows=e.rows,
spacing=e.spacing,
origin=e.origin,
rotation=e.rotation,
magnification=e.magnification,
x_reflection=e.x_reflection,
)
dr.owner = D
converted_references.append(dr)
D.references = converted_references
# Next convert each Polygon
# temp_polygons = list(D.polygons)
# D.polygons = []
# for p in temp_polygons:
# D.add_polygon(p)
# Next convert each Polygon
temp_polygons = list(D.polygons)
D.polygons = []
for p in temp_polygons:
if snap_to_grid_nm:
points_on_grid = snap_to_grid(p.polygons[0], nm=snap_to_grid_nm)
p = gdspy.Polygon(
points_on_grid, layer=p.layers[0], datatype=p.datatypes[0]
)
D.add_polygon(p)
component = cell_to_device[topcell]
cast(Component, component)
name = name or component.name
component.name = name
if metadata_filepath.exists():
logger.info(f"Read YAML metadata from {metadata_filepath}")
metadata = OmegaConf.load(metadata_filepath)
for port_name, port in metadata.ports.items():
if port_name not in component.ports:
component.add_port(
name=port_name,
midpoint=port.midpoint,
width=port.width,
orientation=port.orientation,
layer=port.layer,
port_type=port.port_type,
)
component.settings = OmegaConf.to_container(metadata.settings)
component.name = name
if decorator:
component_new = decorator(component)
component = component_new or component
if flatten:
component.flatten()
component.info.update(**kwargs)
component.lock()
return component
if __name__ == "__main__":
gdspath = CONFIG["gdsdir"] / "mzi2x2.gds"
# c = import_gds(gdspath, snap_to_grid_nm=5, flatten=True, name="TOP")
c = import_gds(gdspath, snap_to_grid_nm=5, flatten=True)
print(c)
c.show()
```
#### File: gdsfactory/routing/add_electrical_pads_top_dc.py
```python
from typing import Callable
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.pad import pad_array as pad_array_function
from gdsfactory.components.wire import wire_corner
from gdsfactory.port import select_ports_electrical
from gdsfactory.routing.get_bundle import get_bundle
from gdsfactory.routing.sort_ports import sort_ports_x
from gdsfactory.types import ComponentFactory, Float2
@cell
def add_electrical_pads_top_dc(
component: Component,
spacing: Float2 = (0.0, 100.0),
pad_array: ComponentFactory = pad_array_function,
select_ports: Callable = select_ports_electrical,
**kwargs,
) -> Component:
"""connects component electrical ports with pad array at the top
Args:
component:
spacing: component to pad spacing
pad_array:
select_ports: function to select_ports
**kwargs: route settings
"""
c = Component()
cref = c << component
ports = select_ports(cref.ports)
ports_component = list(ports.values())
ports_component = [port.copy() for port in ports_component]
for port in ports_component:
port.orientation = 90
pads = c << pad_array(columns=len(ports))
pads.x = cref.x + spacing[0]
pads.ymin = cref.ymax + spacing[1]
ports_pads = list(pads.ports.values())
ports_component = sort_ports_x(ports_component)
ports_pads = sort_ports_x(ports_pads)
routes = get_bundle(ports_component, ports_pads, bend=wire_corner, **kwargs)
for route in routes:
c.add(route.references)
c.add_ports(cref.ports)
for port in ports_component:
c.ports.pop(port.name)
c.copy_child_info(component)
return c
if __name__ == "__main__":
import gdsfactory as gf
c = gf.components.straight_heater_metal(length=100.0)
cc = add_electrical_pads_top_dc(component=c, layer=(31, 0), width=10)
cc.show()
```
#### File: gdsfactory/samples/20_components.py
```python
import gdsfactory as gf
def straight_wide1(width=10, **kwargs) -> gf.Component:
return gf.components.straight(width=width, **kwargs)
straight_wide2 = gf.partial(gf.components.straight, width=10)
if __name__ == "__main__":
# c = straight_wide1()
c = straight_wide2()
c.show()
```
#### File: samples/mask_pack/test_mask.py
```python
import shutil
from typing import Tuple
import numpy as np
import gdsfactory as gf
from gdsfactory.add_grating_couplers import (
add_grating_couplers_with_loopback_fiber_array,
)
from gdsfactory.component import Component
from gdsfactory.config import CONFIG
from gdsfactory.mask.write_labels import write_labels
layer_label = (200, 0)
add_te = gf.partial(
gf.routing.add_fiber_array,
grating_coupler=gf.components.grating_coupler_elliptical_te,
layer_label=layer_label,
)
add_tm = gf.partial(
gf.routing.add_fiber_array,
grating_coupler=gf.components.grating_coupler_elliptical_tm,
bend_radius=20,
layer_label=layer_label,
)
@gf.cell
def coupler_te(
gap: float,
length: int,
) -> Component:
"""Evanescent coupler with TE grating coupler."""
c = gf.components.coupler(gap=gap, length=length)
cc = add_te(c)
return cc
@gf.cell
def spiral_te(width: float = 0.5, length: int = 20e3) -> Component:
"""Spiral with TE grating_coupler
Args:
width: waveguide width um
length: um
"""
c = gf.components.spiral_inner_io(width=width, length=length)
ce = gf.components.extend_ports(c)
cc = add_grating_couplers_with_loopback_fiber_array(
component=ce,
grating_coupler=gf.components.grating_coupler_elliptical_te,
bend=gf.components.bend_euler,
layer_label=layer_label,
component_name=c.name,
)
return cc
@gf.cell
def spiral_tm(width=0.5, length=20e3):
"""Spiral with TM grating_coupler
Args:
width: waveguide width um
length: um
"""
c = gf.components.spiral_inner_io(
width=width, length=length, waveguide_spacing=10, N=5
)
ce = gf.components.extend_ports(c)
cc = add_grating_couplers_with_loopback_fiber_array(
component=ce,
grating_coupler=gf.components.grating_coupler_elliptical_tm,
bend=gf.components.bend_euler,
layer_label=layer_label,
component_name=c.name,
)
return cc
def test_mask(
precision: float = 1e-9,
labels_prefix: str = "opt",
layer_label: Tuple[int, int] = layer_label,
) -> Component:
"""Returns mask."""
workspace_folder = CONFIG["samples_path"] / "mask_pack"
build_path = workspace_folder / "build"
mask_path = build_path / "mask"
shutil.rmtree(build_path, ignore_errors=True)
mask_path.mkdir(parents=True, exist_ok=True)
gdspath = mask_path / "sample_mask.gds"
# markdown_path = gdspath.with_suffix(".md")
# json_path = gdspath.with_suffix(".json")
# test_metadata_path = gdspath.with_suffix(".tp.json")
components = [spiral_te(length=length) for length in np.array([2, 4, 6]) * 1e4]
components += [coupler_te(length=length, gap=0.2) for length in [10, 20, 30, 40]]
c = gf.pack(components)
m = c[0]
m.name = "sample_mask"
m.write_gds_with_metadata(gdspath)
csvpath = write_labels(
gdspath=gdspath, prefix=labels_prefix, layer_label=layer_label
)
assert gdspath.exists()
assert csvpath.exists()
return m
if __name__ == "__main__":
m = test_mask()
m.show()
```
#### File: simulation/gmeep/get_simulation.py
```python
import inspect
import warnings
from typing import Any, Dict, Optional
import meep as mp
import numpy as np
import pydantic
import gdsfactory as gf
from gdsfactory.component import Component
from gdsfactory.components.extension import move_polar_rad_copy
from gdsfactory.simulation.gmeep.get_material import get_material
from gdsfactory.tech import LAYER_STACK, LayerStack
mp.verbosity(0)
sig = inspect.signature(mp.Simulation)
settings_meep = set(sig.parameters.keys())
@pydantic.validate_arguments
def get_simulation(
component: Component,
resolution: int = 30,
extend_ports_length: Optional[float] = 10.0,
layer_stack: LayerStack = LAYER_STACK,
zmargin_top: float = 3.0,
zmargin_bot: float = 3.0,
tpml: float = 1.5,
clad_material: str = "SiO2",
is_3d: bool = False,
wavelength_start: float = 1.5,
wavelength_stop: float = 1.6,
wavelength_points: int = 50,
dfcen: float = 0.2,
port_source_name: str = "o1",
port_field_monitor_name: str = "o2",
port_margin: float = 3,
distance_source_to_monitors: float = 0.2,
port_source_offset: float = 0,
port_monitor_offset: float = 0,
dispersive: bool = False,
**settings,
) -> Dict[str, Any]:
r"""Returns Simulation dict from gdsfactory Component
based on meep directional coupler example
https://meep.readthedocs.io/en/latest/Python_Tutorials/GDSII_Import/
https://support.lumerical.com/hc/en-us/articles/360042095873-Metamaterial-S-parameter-extraction
.. code::
top view
________________________________
| |
| xmargin_left | port_extension
|<------> port_margin ||<-->
___|___________ _________||___
| \ / |
| \ / |
| ====== |
| / \ |
___|___________/ \__________|___
| | <-------->|
| |ymargin_bot xmargin_right|
| | |
|___|___________________________|
side view
________________________________
| | |
| | |
| zmargin_top |
|ymargin | |
|<---> _____ _|___ |
| | | | | |
| | | | | |
| |_____| |_____| |
| | |
| | |
| |zmargin_bot |
| | |
|_______|_______________________|
Args:
component: gf.Component
resolution: in pixels/um (20: for coarse, 120: for fine)
extend_ports_length: to extend ports beyond the PML
layer_stack: Dict of layer number (int, int) to thickness (um)
zmargin_top: thickness for cladding above core
zmargin_bot: thickness for cladding below core
tpml: PML thickness (um)
clad_material: material for cladding
is_3d: if True runs in 3D
wavelength_start: wavelength min (um)
wavelength_stop: wavelength max (um)
wavelength_points: wavelength steps
dfcen: delta frequency
port_source_name: input port name
port_field_monitor_name:
port_margin: margin on each side of the port
distance_source_to_monitors: in (um) source goes before
port_source_offset: offset between source GDS port and source MEEP port
port_monitor_offset: offset between monitor GDS port and monitor MEEP port
dispersive: use dispersive material models (requires higher resolution)
Keyword Args:
settings: other parameters for sim object (resolution, symmetries, etc.)
Returns:
simulation dict: sim, monitors, sources
Make sure you review the simulation before you simulate a component
.. code::
import gdsfactory as gf
import gdsfactory.simulation.meep as gm
c = gf.components.bend_circular()
gm.write_sparameters_meep(c, run=False)
"""
for setting in settings.keys():
if setting not in settings_meep:
raise ValueError(f"{setting} not in {settings_meep}")
layer_to_thickness = layer_stack.get_layer_to_thickness()
layer_to_material = layer_stack.get_layer_to_material()
layer_to_zmin = layer_stack.get_layer_to_zmin()
layer_to_sidewall_angle = layer_stack.get_layer_to_sidewall_angle()
component_ref = component.ref()
component_ref.x = 0
component_ref.y = 0
wavelengths = np.linspace(wavelength_start, wavelength_stop, wavelength_points)
port_names = list(component_ref.ports.keys())
if port_source_name not in port_names:
warnings.warn(f"port_source_name={port_source_name!r} not in {port_names}")
port_source = component_ref.get_ports_list()[0]
port_source_name = port_source.name
warnings.warn(f"Selecting port_source_name={port_source_name!r} instead.")
if port_field_monitor_name not in component_ref.ports:
warnings.warn(
f"port_field_monitor_name={port_field_monitor_name!r} not in {port_names}"
)
port_field_monitor = (
component_ref.get_ports_list()[0]
if len(component.ports) < 2
else component.get_ports_list()[1]
)
port_field_monitor_name = port_field_monitor.name
warnings.warn(
f"Selecting port_field_monitor_name={port_field_monitor_name!r} instead."
)
assert isinstance(
component, Component
), f"component needs to be a gf.Component, got Type {type(component)}"
component_extended = (
gf.components.extension.extend_ports(
component=component, length=extend_ports_length, centered=True
)
if extend_ports_length
else component
)
gf.show(component_extended)
component_extended.flatten()
component_extended = component_extended.ref()
# geometry_center = [component_extended.x, component_extended.y]
# geometry_center = [0, 0]
# print(geometry_center)
layers_thickness = [
layer_to_thickness[layer]
for layer in component.layers
if layer in layer_to_thickness
]
t_core = max(layers_thickness)
cell_thickness = tpml + zmargin_bot + t_core + zmargin_top + tpml if is_3d else 0
cell_size = mp.Vector3(
component.xsize + 2 * tpml,
component.ysize + 2 * tpml,
cell_thickness,
)
geometry = []
layer_to_polygons = component_extended.get_polygons(by_spec=True)
for layer, polygons in layer_to_polygons.items():
if layer in layer_to_thickness and layer in layer_to_material:
height = layer_to_thickness[layer] if is_3d else mp.inf
zmin_um = layer_to_zmin[layer] if is_3d else 0
# center = mp.Vector3(0, 0, (zmin_um + height) / 2)
for polygon in polygons:
vertices = [mp.Vector3(p[0], p[1], zmin_um) for p in polygon]
material_name = layer_to_material[layer]
material = get_material(name=material_name, dispersive=dispersive)
geometry.append(
mp.Prism(
vertices=vertices,
height=height,
sidewall_angle=layer_to_sidewall_angle[layer],
material=material,
# center=center
)
)
freqs = 1 / wavelengths
fcen = np.mean(freqs)
frequency_width = dfcen * fcen
# Add source
port = component_ref.ports[port_source_name]
angle_rad = np.radians(port.orientation)
width = port.width + 2 * port_margin
size_x = width * abs(np.sin(angle_rad))
size_y = width * abs(np.cos(angle_rad))
size_x = 0 if size_x < 0.001 else size_x
size_y = 0 if size_y < 0.001 else size_y
size_z = cell_thickness - 2 * tpml if is_3d else 20
size = [size_x, size_y, size_z]
xy_shifted = move_polar_rad_copy(
np.array(port.center), angle=angle_rad, length=port_source_offset
)
center = xy_shifted.tolist() + [0] # (x, y, z=0)
field_monitor_port = component_ref.ports[port_field_monitor_name]
field_monitor_point = field_monitor_port.center.tolist() + [0] # (x, y, z=0)
if np.isclose(port.orientation, 0):
direction = mp.X
elif np.isclose(port.orientation, 90):
direction = mp.Y
elif np.isclose(port.orientation, 180):
direction = mp.X
elif np.isclose(port.orientation, 270):
direction = mp.Y
else:
ValueError(f"Port angle {port.orientation} not 0, 90, 180, or 270 degrees!")
sources = [
mp.EigenModeSource(
src=mp.GaussianSource(fcen, fwidth=frequency_width),
size=size,
center=center,
eig_band=1,
eig_parity=mp.NO_PARITY if is_3d else mp.EVEN_Y + mp.ODD_Z,
eig_match_freq=True,
eig_kpoint=-1 * mp.Vector3(x=1).rotate(mp.Vector3(z=1), angle_rad),
direction=direction,
)
]
sim = mp.Simulation(
cell_size=cell_size,
boundary_layers=[mp.PML(tpml)],
sources=sources,
geometry=geometry,
default_material=get_material(name=clad_material),
resolution=resolution,
**settings,
)
# Add port monitors dict
monitors = {}
for port_name in component_ref.ports.keys():
port = component_ref.ports[port_name]
angle_rad = np.radians(port.orientation)
width = port.width + 2 * port_margin
size_x = width * abs(np.sin(angle_rad))
size_y = width * abs(np.cos(angle_rad))
size_x = 0 if size_x < 0.001 else size_x
size_y = 0 if size_y < 0.001 else size_y
size = [size_x, size_y, size_z]
# if monitor has a source move monitor inwards
length = (
-distance_source_to_monitors + port_source_offset
if port_name == port_source_name
else port_monitor_offset
)
xy_shifted = move_polar_rad_copy(
np.array(port.center), angle=angle_rad, length=length
)
center = xy_shifted.tolist() + [0] # (x, y, z=0)
m = sim.add_mode_monitor(freqs, mp.ModeRegion(center=center, size=size))
m.z = 0
monitors[port_name] = m
return dict(
sim=sim,
cell_size=cell_size,
freqs=freqs,
monitors=monitors,
sources=sources,
field_monitor_point=field_monitor_point,
port_source_name=port_source_name,
initialized=False,
)
sig = inspect.signature(get_simulation)
settings_get_simulation = set(sig.parameters.keys()).union(settings_meep)
if __name__ == "__main__":
c = gf.components.straight(length=2, width=0.5)
sim_dict = get_simulation(
c,
is_3d=False,
# resolution=50,
# port_source_offset=-0.1,
# port_field_monitor_offset=-0.1,
# port_margin=2.5,
)
# sim.plot3D()
# sim.plot2D() # plot top view (is_3D needs to be False)
# Plot monitor cross-section (is_3D needs to be True)
# sim.init_sim()
# eps_data = sim.get_epsilon()
# from mayavi import mlab
# s = mlab.contour3d(eps_data, colormap="YlGnBu")
# mlab.show()
print(settings_get_simulation)
```
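The settings check at the top of `get_simulation` works by introspecting the signature of `mp.Simulation`. The pattern generalizes to any callable; here is a self-contained sketch (the `fake_sim` function is a made-up stand-in, not a meep API):
```python
import inspect

def validate_kwargs(func, **kwargs):
    # Reject any keyword that func does not accept, the same idea as
    # the settings_meep check above.
    valid = set(inspect.signature(func).parameters)
    for name in kwargs:
        if name not in valid:
            raise ValueError(f"{name} not in {sorted(valid)}")

def fake_sim(resolution=30, symmetries=None):
    pass

validate_kwargs(fake_sim, resolution=50)  # passes silently
# validate_kwargs(fake_sim, bogus=1)      # would raise ValueError
```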
#### File: gdsfactory/simulation/plot.py
```python
from typing import Optional, Tuple
import matplotlib.pyplot as plt
import numpy as np
from pandas import DataFrame
import gdsfactory as gf
def plot_sparameters(
df: DataFrame,
logscale: bool = True,
keys: Optional[Tuple[str, ...]] = None,
**sim_settings,
):
"""Plots Sparameters from a pandas DataFrame.
Args:
df: Sparameters pandas DataFrame
logscale: plots 20*log10(S)
keys: list of keys to plot, plots all by default.
Keyword Args:
sim_settings: simulation settings for the write_sparameters_function
"""
w = df["wavelengths"] * 1e3
keys = keys or [
key for key in df.keys() if key.lower().startswith("s") and key.endswith("m")
]
for key in keys:
if key in df:
y = df[key]
y = 20 * np.log10(y) if logscale else y
plt.plot(w, y, label=key[:-1])
else:
raise ValueError(f"{key} not in {df.keys()}")
plt.legend()
plt.xlabel("wavelength (nm)")
plt.ylabel("|S| (dB)") if logscale else plt.ylabel("|S|")
plt.show()
def plot_imbalance2x2(df: DataFrame, port1: str = "s13m", port2: str = "s14m") -> None:
"""Plots imbalance in % for 2x2 coupler"""
y1 = df[port1].values
y2 = df[port2].values
imbalance = y1 / y2
x = df["wavelengths"] * 1e3
plt.plot(x, 100 * abs(imbalance))
plt.xlabel("wavelength (nm)")
plt.ylabel("imbalance (%)")
plt.grid()
def plot_loss2x2(df: DataFrame, port1: str = "s13m", port2: str = "s14m") -> None:
"""Plots imbalance in % for 2x2 coupler"""
y1 = df[port1].values
y2 = df[port2].values
x = df["wavelengths"] * 1e3
plt.plot(x, abs(10 * np.log10(y1 ** 2 + y2 ** 2)))
plt.xlabel("wavelength (nm)")
plt.ylabel("excess loss (dB)")
plot_loss1x2 = gf.partial(plot_loss2x2, port1="s13m", port2="s12m")
plot_imbalance1x2 = gf.partial(plot_imbalance2x2, port1="s13m", port2="s12m")
if __name__ == "__main__":
import gdsfactory.simulation as sim
df = sim.get_sparameters_data_lumerical(component=gf.components.mmi1x2)
plot_sparameters(df, logscale=True)
plt.show()
```
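Note the two dB conventions in this module: `plot_sparameters` applies `20*log10` to the field amplitudes `|S|`, while `plot_loss2x2` applies `10*log10` to summed powers. Both give the same dB figure for the same physical quantity, as this quick numpy-only sanity check shows:
```python
import numpy as np

s = 0.5                                    # example field amplitude
db_amplitude = 20 * np.log10(s)            # amplitude convention
db_power = 10 * np.log10(s ** 2)           # power convention
assert np.isclose(db_amplitude, db_power)  # both about -6.02 dB
```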
#### File: simphony/components/coupler.py
```python
from SiPANN.scee import Standard
from SiPANN.scee_int import SimphonyWrapper
def coupler(
width: float = 0.5,
thickness: float = 0.22,
gap: float = 0.22,
length: float = 10.0,
sw_angle: float = 90.0,
dx: float = 1.5,
dy: float = 5.0,
**kwargs,
):
r"""Return simphony Directional coupler model.
Args:
width: Width of the straight in um (Valid for 0.4-0.6)
thickness: Thickness of straight in um (Valid for 0.18-0.24)
gap: Minimum distance between the two straights edge in um. (Must be > 0.1)
length: float or ndarray Length of the straight portion of both straights in um.
dx: Horizontal distance between end of coupler until straight portion in um.
dy: Vertical distance between end of coupler until straight portion in um.
sw_angle: Sidewall angle from horizontal in degrees
This is what most people think of when they picture a directional coupler.
Ports are named as
.. code::
H
dx dx
|------| |------|
o2 ________ _______o3 _ _
\ / | |
\ length / | _|_V
======================= gap | dy
/ \ |
________/ \_______ |
o1 o4
.. plot::
:include-source:
import gdsfactory as gf
c = gf.components.coupler(gap=0.2, length=10)
c.plot()
.. plot::
:include-source:
import gdsfactory.simulation.simphony.components as gc
import gdsfactory.simulation.simphony as gs
c = gc.coupler()
gs.plot_model(c)
"""
# SiPANN units are in nm
width = width * 1e3
thickness = thickness * 1e3
gap = gap * 1e3
length = length * 1e3
H = dx * 1e3
V = dy * 1e3 / 2
s = Standard(
width=width,
thickness=thickness,
gap=gap,
length=length,
H=H,
V=V,
sw_angle=sw_angle,
)
model = SimphonyWrapper(s)
model.pins = ("o1", "o2", "o4", "o3")
model.sipann = s
return model
if __name__ == "__main__":
import matplotlib.pyplot as plt
from gdsfactory.simulation.simphony.plot_model import plot_model
c = coupler()
print(c)
plot_model(c)
plt.show()
```
#### File: simphony/components/mmi2x2.py
```python
import gdsfactory as gf
from gdsfactory.simulation.simphony.model_from_gdsfactory import model_from_gdsfactory
def mmi2x2(**kwargs):
r"""Return 2x2 MultiModeInterferometer Sparameter model.
Args:
width: input and output straight width
width_taper: interface between input straights and mmi region
length_taper: into the mmi region
length_mmi: in x direction
width_mmi: in y direction
gap_mmi: gap between tapered wg
taper: taper function
layer:
layers_cladding:
cladding_offset
.. code::
length_mmi
<------>
________
| |
__/ \__
o2 __ __ o3
\ /_ _ _ _
| | _ _ _ _| gap_mmi
__/ \__
o1 __ __ o4
\ /
|________|
<->
length_taper
.. plot::
:include-source:
import gdsfactory as gf
c = gf.components.mmi2x2(length_mmi=15.45, width_mmi=2.1)
c.plot()
.. plot::
:include-source:
import gdsfactory.simulation.simphony as gs
import gdsfactory.simulation.simphony.components as gc
c = gc.mmi2x2()
gs.plot_model(c)
"""
m = model_from_gdsfactory(gf.components.mmi2x2)
return m
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
wav = np.linspace(1520, 1570, 1024) * 1e-9
f = 3e8 / wav
c = mmi2x2()
s = c.s_parameters(freq=f)
plt.plot(wav, np.abs(s[:, 1] ** 2))
print(c.pins)
plt.show()
```
#### File: simulation/simphony/get_transmission.py
```python
from simphony.netlist import Subcircuit
from simphony.simulation import SweepSimulation
from simphony.tools import freq2wl
def get_transmission(
circuit: Subcircuit,
pin_in: str = "o1",
pin_out: str = "o2",
start: float = 1500e-9,
stop: float = 1600e-9,
num: int = 2000,
):
"""Return transmission for a circuit.
Args:
circuit:
pin_in: input pin
pin_out: output pin
start: start wavelength (m)
stop: stop wavelength (m)
num: number of points
"""
simulation = SweepSimulation(circuit, start, stop, num)
result = simulation.simulate()
f, s = result.data(pin_in, pin_out)
w = freq2wl(f) * 1e9
return dict(wavelengths=w, s=s)
```
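A hypothetical usage sketch; `circuit` stands for an already-built simphony `Subcircuit`, which this file does not construct:
```python
import matplotlib.pyplot as plt

# `circuit` is an assumed, pre-built simphony Subcircuit.
data = get_transmission(circuit, pin_in="o1", pin_out="o2", num=500)
plt.plot(data["wavelengths"], abs(data["s"]) ** 2)
plt.xlabel("wavelength (nm)")
plt.ylabel("transmission")
plt.show()
```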
#### File: simulation/simphony/model_from_gdsfactory.py
```python
import numpy as np
from scipy.constants import speed_of_light
from simphony.elements import Model
from simphony.tools import interpolate
import gdsfactory as gf
import gdsfactory.simulation.lumerical as sim
from gdsfactory.component import Component
def model_from_gdsfactory(
component: Component, dirpath=gf.CONFIG["sparameters"], **kwargs
) -> Model:
"""Return simphony model from gdsfactory Component Sparameters
Args:
component: component factory or instance
dirpath: sparameters directory
kwargs: settings
"""
kwargs.pop("function_name", "")
kwargs.pop("module", "")
component = gf.call_if_func(component, **kwargs)
pins, f, s = sim.read_sparameters_lumerical(component=component, dirpath=dirpath)
def interpolate_sp(freq):
return interpolate(freq, f, s)
m = Model()
m.pins = pins
m.s_params = (f, s)
m.s_parameters = interpolate_sp
m.freq_range = (m.s_params[0][0], m.s_params[0][-1])
m.wavelengths = speed_of_light / np.array(f)
m.s = s
return m
if __name__ == "__main__":
import matplotlib.pyplot as plt
c = model_from_gdsfactory(gf.c.mmi1x2())
# wav = np.linspace(1520, 1570, 1024) * 1e-9
# f = speed_of_light / wav
# s = c.s_parameters(freq=f)
wav = c.wavelengths
s = c.s
plt.plot(wav * 1e9, np.abs(s[:, 1] ** 2))
plt.show()
```
#### File: simulation/simphony/plot_model.py
```python
from typing import Optional, Tuple
import matplotlib.pyplot as plt
import numpy as np
from scipy.constants import speed_of_light
from simphony.elements import Model
def plot_model(
model: Model,
pin_in: str = "o1",
pins: Optional[Tuple[str, ...]] = None,
wavelengths=None,
logscale: bool = True,
fig=None,
phase: bool = False,
):
"""Plot simphony Sparameters for a model
Args:
model: simphony model
pin_in: input pin name
pins: list of pins
wavelengths (m):
logscale:
fig: figure
phase: plots phase
.. plot::
:include-source:
import gdsfactory.simulation.simphony as gs
import gdsfactory.simulation.simphony.components as gc
c = gc.mmi1x2()
gs.plot_model(c)
"""
m = model() if callable(model) else model
if wavelengths is None:
if hasattr(m, "wavelengths"):
wavelengths = m.wavelengths
else:
wavelengths = np.linspace(1520e-9, 1580e-9, 2000)
f = speed_of_light / wavelengths
s = m.s_parameters(freq=f)
pins = pins or m.pins
if not isinstance(pins, (tuple, set, list)):
raise ValueError(f"pins {pins} need to be a tuple, set or list")
for pin in pins:
if pin not in m.pins:
raise ValueError(f"{pin} not in {m.pins}")
if pin_in not in m.pins:
raise ValueError(f"pin_in = `{pin_in}` not in {m.pins}")
pin_in_index = m.pins.index(pin_in)
fig = fig or plt.subplot()
ax = fig.axes
for pin_out in pins:
pin_out_index = m.pins.index(pin_out)
if phase:
y = np.angle(s[:, pin_out_index, pin_in_index])
ylabel = "angle (rad)"
else:
y = np.abs(s[:, pin_out_index, pin_in_index]) ** 2
y = 10 * np.log10(y) if logscale else y
ylabel = "|S (dB)|" if logscale else "|S|"
ax.plot(wavelengths * 1e9, y, label=pin_out)
ax.set_xlabel("wavelength (nm)")
ax.set_ylabel(ylabel)
plt.legend()
plt.show()
return ax
if __name__ == "__main__":
from simphony.library import siepic
from gdsfactory.simulation.simphony.components.straight import straight
w = np.linspace(1520, 1570, 1024) * 1e-9
coupler = siepic.ebeam_dc_halfring_straight(
gap=200e-9, radius=10e-6, width=500e-9, thickness=220e-9, couple_length=0.0
)
# plot_model(coupler, pin_in="n1")
# plt.legend()
# plt.show()
m = straight()
plot_model(m, phase=False)
plt.show()
```
#### File: simulation/sipann/coupler_ring.py
```python
from SiPANN.scee import HalfRacetrack
def coupler_ring(
radius: float = 5.0,
width: float = 0.5,
thickness: float = 0.22,
gap: float = 0.22,
length_x: float = 4.0,
sw_angle: float = 90.0,
**kwargs
):
r"""Return model for for half a ring coupler.
Args:
radius: 5
width: width um (Valid for 0.4-0.6)
thickness: Thickness in um (Valid for 0.18-0.24)
gap: distance between straights in um. (Must be > 0.1)
length_x: Length of straight portion of coupler in um
sw_angle: Sidewall angle from horizontal in degrees
kwargs: geometrical args that this model ignores
.. code::
pin naming in sipann
2 \ / 4
\ /
---------
1---------------3
for simphony/gdsfactory
o2 o3
| |
\ /
\ /
---=========---
o1 length_x o4
"""
width = width * 1e3
thickness = thickness * 1e3
gap = gap * 1e3
length = length_x * 1e3
radius = radius * 1e3
s = HalfRacetrack(
radius=radius,
width=width,
thickness=thickness,
gap=gap,
length=length,
sw_angle=sw_angle,
)
return s
if __name__ == "__main__":
import matplotlib.pyplot as plt
import numpy as np
c = coupler_ring()
wavelength = np.linspace(1500, 1600, 500)
k = c.predict((1, 4), wavelength)
t = c.predict((1, 3), wavelength)
plt.figure(figsize=(15, 5))
plt.subplot(121)
plt.plot(wavelength, np.abs(k) ** 2, label="k")
plt.plot(wavelength, np.abs(t) ** 2, label="t")
plt.xlabel("Wavelength (nm)")
plt.ylabel("Magnitude Squared")
plt.title(r"Crossover at $\lambda \approx 1550nm$")
plt.legend()
plt.show()
```
#### File: gdsfactory/tests/test_component_from_yaml2.py
```python
import pytest
from pytest_regressions.data_regression import DataRegressionFixture
import gdsfactory as gf
from gdsfactory.difftest import difftest
mirror_port = """
name: mirror_port
instances:
mmi_long:
component: mmi1x2
settings:
width_mmi: 4.5
length_mmi: 5
placements:
mmi_long:
port: o1
x: 20
y: 10
mirror: True
ports:
o1: mmi_long,o3
o2: mmi_long,o2
o3: mmi_long,o1
"""
mirror_x = """
name: mirror_x
instances:
mmi_long:
component: mmi1x2
settings:
width_mmi: 4.5
length_mmi: 5
placements:
mmi_long:
x: 0
y: 0
mirror: 25
ports:
o1: mmi_long,o3
o2: mmi_long,o2
o3: mmi_long,o1
"""
rotation = """
name: rotation
instances:
mmi_long:
component: mmi1x2
settings:
width_mmi: 4.5
length_mmi: 5
placements:
mmi_long:
port: o1
x: 10
y: 20
rotation: 90
ports:
o1: mmi_long,o3
o2: mmi_long,o2
o3: mmi_long,o1
"""
dxdy = """
name: dxdy
instances:
mmi_long:
component: mmi1x2
settings:
width_mmi: 4.5
length_mmi: 10
mmi_short:
component: mmi1x2
settings:
width_mmi: 4.5
length_mmi: 5
placements:
mmi_short:
port: o1
x: 0
y: 0
mmi_long:
port: o1
x: mmi_short,o2
y: mmi_short,o2
dx: 10
dy: -10
ports:
o1: mmi_long,o3
"""
yaml_list = [mirror_port, mirror_x, rotation, dxdy]
@pytest.mark.parametrize("yaml_index", range(len(yaml_list)))
def test_components(
yaml_index: int, data_regression: DataRegressionFixture, check: bool = True
) -> None:
yaml = yaml_list[yaml_index]
c = gf.read.from_yaml(yaml)
difftest(c)
if check:
data_regression.check(c.to_dict())
if __name__ == "__main__":
c = gf.read.from_yaml(mirror_port)
c = gf.read.from_yaml(dxdy)
c.show()
```
#### File: gdsfactory/tests/test_component_from_yaml_bezier.py
```python
import gdsfactory as gf
from gdsfactory.component import Component
yaml = """
name:
test_component_yaml_without_cell
instances:
mmi:
component: mmi1x2
bend:
component: bend_s
connections:
bend,o1: mmi,o2
"""
def test_component_from_yaml_without_cell() -> Component:
"""bezier does not have cell"""
c = gf.read.from_yaml(yaml)
assert c.name == "test_component_yaml_without_cell", c.name
assert len(c.get_dependencies()) == 2, len(c.get_dependencies())
assert len(c.ports) == 0, len(c.ports)
return c
if __name__ == "__main__":
c = test_component_from_yaml_without_cell()
print(c.name)
c.show()
```
#### File: gdsfactory/tests/test_get_route_error_bundle.py
```python
import pytest
import gdsfactory as gf
from gdsfactory.routing.manhattan import RouteWarning
def test_route_error_bundle():
"""Ensures that an impossible route raises value Error"""
c = gf.Component("get_route_from_steps_sample")
w = gf.components.array(
gf.partial(gf.components.straight, layer=(2, 0)),
rows=3,
columns=1,
spacing=(0, 50),
)
left = c << w
right = c << w
right.move((200, 100))
p1 = left.get_ports_list(orientation=0)
p2 = right.get_ports_list(orientation=180)
with pytest.warns(RouteWarning):
routes = gf.routing.get_bundle_from_steps(
p1,
p2,
steps=[{"x": 300}, {"x": 301}],
)
for route in routes:
c.add(route.references)
return c
if __name__ == "__main__":
c = test_route_error_bundle()
c.show()
```
#### File: gdsfactory/tests/test_import_gds_avoid_duplicated_cells.py
```python
import gdsfactory as gf
from gdsfactory import geometry
def test_import_first():
c1 = gf.Component("parent")
c1 << gf.components.mzi_arms()
gdspath1 = c1.write_gds("extra/mzi.gds")
mzi1 = gf.import_gds(gdspath1, safe_cell_names=True) # IMPORT
c1 = gf.components.mzi_arms() # BUILD
c2 = gf.grid([mzi1, c1])
gdspath2 = c2.write_gds("extra/mzi2.gds")
geometry.check_duplicated_cells(gdspath2)
def test_build_first():
c1 = gf.Component("parent")
c1 << gf.components.mzi_arms()
gdspath1 = c1.write_gds("extra/mzi.gds")
c1 = gf.components.mzi_arms() # BUILD
mzi1 = gf.import_gds(gdspath1, safe_cell_names=True) # IMPORT
c2 = gf.grid([mzi1, c1])
gdspath2 = c2.write_gds("extra/mzi2.gds")
geometry.check_duplicated_cells(gdspath2)
def test_import_twice():
c0 = gf.Component("parent")
c0 << gf.components.mzi_arms()
gdspath1 = c0.write_gds("extra/mzi.gds")
c1 = gf.import_gds(gdspath1) # IMPORT
c2 = gf.import_gds(gdspath1) # IMPORT
assert len(c1.references) == len(c2.references)
assert len(c1.polygons) == len(c2.polygons)
assert len(c1.labels) == len(c2.labels)
assert len(c1.hash_geometry()) == len(c2.hash_geometry())
def test_import_thrice():
c0 = gf.Component("parent")
c0 << gf.components.mzi_arms()
gdspath1 = c0.write_gds("extra/mzi.gds")
c = gf.Component()
c << gf.import_gds(gdspath1) # IMPORT
c << gf.import_gds(gdspath1) # IMPORT
c << gf.import_gds(gdspath1) # IMPORT
gdspath2 = c.write_gds("extra/mzis.gds")
geometry.check_duplicated_cells(gdspath2)
if __name__ == "__main__":
test_import_first()
test_build_first()
test_import_twice()
test_import_thrice()
# gf.clear_cache()
# c0 << gf.components.mzi_arms()
# gdspath1 = c0.write_gds("extra/mmi.gds")
# c = gf.Component("parent")
# c0 = gf.components.mmi1x2()
# gdspath1 = "extra/mmi.gds"
# c1 = gf.import_gds(gdspath1) # IMPORT
# c2 = gf.import_gds(gdspath1) # IMPORT
# c << c0
# c << c1
# c << c2
# gdspath2 = c.write_gds("extra/mzis.gds")
# geometry.check_duplicated_cells(gdspath2)
# c.show()
```
#### File: gdsfactory/tests/test_import_gds_settings.py
```python
from typing import Any, Dict, List, Union
from gdsfactory.components import factory
skip_test = {
"version_stamp",
"extend_ports_list",
"extend_port",
"grating_coupler_tree",
"compensation_path",
"spiral_inner_io_with_gratings",
"component_sequence",
"straight_heater_metal_90_90",
"straight_heater_metal_undercut_90_90",
"mzi_phase_shifter_top_heater_metal",
}
components_to_test = set(factory.keys()) - skip_test
def tuplify(iterable: Union[List, Dict]) -> Any:
"""From a list or tuple returns a tuple."""
if isinstance(iterable, list):
return tuple(map(tuplify, iterable))
if isinstance(iterable, dict):
return {k: tuplify(v) for k, v in iterable.items()}
return iterable
def sort_dict(d: Dict[str, Any]) -> Dict[str, Any]:
return {k: d[k] for k in sorted(d)}
# @pytest.mark.parametrize("component_type", components_to_test)
# def test_properties_components(component_type: str) -> Component:
# """Write component to GDS with setttings written on a label.
# Then import the GDS and check that the settings imported match the original settings.
# """
# cnew = gf.Component()
# c1 = factory[component_type]()
# c1ref = cnew << c1
# add_settings_label(cnew, reference=c1ref)
# gdspath = cnew.write_gds_with_metadata()
# c2 = import_gds(gdspath)
# add_settings_from_label(c2)
# c1s = sort_dict(tuplify(OmegaConf.to_container(c1.settings.full)))
# c2s = sort_dict(tuplify(OmegaConf.to_container(c2.settings.full)))
# # c1s.pop("info")
# # c2s.pop("info")
# # c1s.pop("changed")
# # c2s.pop("changed")
# d = diff(c1s, c2s)
# # print(c1s)
# print(c2s)
# print(d)
# assert len(d) == 0, f"imported settings are different from original {d}"
# return c2
if __name__ == "__main__":
pass
# c = test_properties_components(component_type=list(component_names)[0])
# c = test_properties_components(component_type="ring_single")
# c = test_properties_components(component_type="mzit")
# c = test_properties_components(component_type="bezier")
# c = test_properties_components(component_type="wire_straight")
# c = test_properties_components(component_type="straight")
# c = test_properties_components(component_type="grating_coupler_tree")
# c = test_properties_components(component_type="wire")
# c = test_properties_components(component_type="bend_circular")
# c = test_properties_components(component_type="mzi_arm")
# c = test_properties_components(component_type="straight_pin")
# c.show()
```
#### File: gdsfactory/tests/test_metadata_export.py
```python
import toolz
import gdsfactory as gf
def test_metadata_export_partial():
straight_wide = gf.partial(gf.components.straight, width=2)
c = gf.components.mzi(straight=straight_wide)
d = c.to_dict()
assert d["settings"]["full"]["straight"]["width"] == 2
assert d["settings"]["full"]["straight"]["function"] == "straight"
def test_metadata_export_function():
c = gf.components.mzi()
d = c.to_dict()
assert d["settings"]["full"]["straight"]["function"] == "straight"
def test_metadata_export_compose():
straight_wide = toolz.compose(gf.components.extend_ports, gf.components.straight)
c = gf.components.mzi(straight=straight_wide)
d = c.to_dict()
assert d["settings"]["full"]["straight"][0]["function"] == "straight"
assert d["settings"]["full"]["straight"][1]["function"] == "extend_ports"
if __name__ == "__main__":
test_metadata_export_partial()
test_metadata_export_function()
test_metadata_export_compose()
# c = gf.components.mzi()
# d = c.to_dict()
# print(d.settings.full.straight.function)
# straight_wide = toolz.compose(gf.components.extend_ports, gf.components.straight)
# c = gf.components.mzi(straight=straight_wide)
# d = c.to_dict()
# print(d.settings.full.straight)
# df = d.settings.full
# sf = df.straight
# print(sf)
```
#### File: gdsfactory/tests/test_rotate.py
```python
import gdsfactory as gf
def test_rotate():
c1 = gf.components.straight()
c1r = c1.rotate()
c2 = gf.components.straight()
c2r = c2.rotate()
assert c1.uid == c2.uid
assert c1r.uid == c2r.uid
if __name__ == "__main__":
c1 = gf.components.straight()
c1r = c1.rotate()
c2 = gf.components.straight()
c2r = c2.rotate()
assert c1.uid == c2.uid
assert c1r.uid == c2r.uid
c2r.show()
``` |
{
"source": "JorgePadilla/crabada.py",
"score": 4
} |
#### File: src/helpers/general.py
```python
from typing import Any, Dict, List
def firstOrNone(items: List[Any]) -> Any:
"""
Return the first element of a list or None if it is not set
"""
return nthOrNone(items, 0)
def secondOrNone(items: List[Any]) -> Any:
"""
Return the second element of a list or None if it is not set
"""
return nthOrNone(items, 1)
def thirdOrNone(items: List[Any]) -> Any:
"""
Return the third element of a list or None if it is not set
"""
return nthOrNone(items, 2)
def fourthOrNone(items: List[Any]) -> Any:
"""
Return the fourth element of a list or None if it is not set
"""
return nthOrNone(items, 3)
def nthOrNone(items: List[Any], n: int) -> Any:
"""
Return the element at index n of a list or None if it is not set
"""
try:
return items[n]
except (IndexError, TypeError):
return None
def nthOrLastOrNone(items: List[Any], n: int) -> Any:
"""
Return the element at index n of a list; if it is not set, return
the last element of the list; if the list is empty, return None.
"""
if not items:
return None
return items[n] if len(items) > n else items[-1]
def findInList(items: List[Dict[str, Any]], key: str, value: Any) -> Any:
"""
Return the first dictionary in the list whose value for `key` equals
`value`, or None if there is no match
"""
return firstOrNone([item for item in items if item[key] == value])
``` |
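A usage sketch for the helpers above; the team dictionaries are made-up example data, not part of the project:
```python
# Hypothetical example data.
team = [
    {"name": "crab1", "points": 10},
    {"name": "crab2", "points": 7},
]

assert firstOrNone([1, 2, 3]) == 1
assert secondOrNone([1]) is None
assert nthOrLastOrNone([1, 2], 5) == 2  # index past the end -> last element
assert findInList(team, "name", "crab2")["points"] == 7
```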
{
"source": "jorgeparavicini/FourWins",
"score": 3
} |
#### File: FourWins/tournamentmasters/tournament_master.py
```python
from time import sleep
from typing import Callable, List, Optional, Tuple
from bots import BaseBot
from bots.botutilities import Grid
class TournamentMaster:
win_condition = 4
def __init__(self, bot_1: BaseBot, bot_2: BaseBot, grid_width: int, grid_height: int,
time_between_rounds: float = 0):
self.bot_1 = bot_1
self.bot_2 = bot_2
self.turn = 0
self.is_initialized = False
self.did_stop = False
self.is_paused = False
self.grid = Grid.create(grid_width, grid_height)
self.time_between_rounds = time_between_rounds
self.on_turn_end_cb: List[Callable[[BaseBot], None]] = []
self.on_winner_found_cb: List[Callable[[BaseBot], None]] = []
def current_turns_bot(self):
"""
Selects which bot's turn it is. Can be overridden for different starting techniques or multiple turns in a row.
:return: The bot which gets the next turn.
"""
if self.turn % 2 == 0:
return self.bot_1
else:
return self.bot_2
def play_turn(self) -> Optional[Tuple[int, int]]:
bot = self.current_turns_bot()
bot.update_grid(self.grid)
decision = bot.get_guess()
if self.validate_guess(decision):
row = self.get_row_for_first_empty_cell_in_column(decision)
self.set_chip(bot.id, decision, row)
else:
print("INVALID GUESS")
return None
self.turn += 1
return decision, row
def validate_guess(self, guess: int) -> bool:
return not self.grid.is_column_full(guess)
def get_row_for_first_empty_cell_in_column(self, column: int):
return self.get_highest_chip_for_column(column)
def set_chip(self, bot_id: int, column: int, row: int):
self.grid.set_at(column, row, bot_id)
def get_highest_chip_for_column(self, column: int) -> int:
for i, value in enumerate(self.grid.column(column)):
if value == 0:
return min(i, self.grid.height)
return self.grid.height
def check_if_bot_won_at(self, bot_id: int, x: int, y: int) -> bool:
"""
Checks if the bot with the given id completed a winning group through cell (x, y).
:returns: True if the bot has a group of at least win_condition chips through (x, y), False otherwise
"""
assert (0 <= x < self.grid.width)
assert (0 <= y < self.grid.height)
horizontal = self.grid.check_horizontal_group_at(bot_id, x, y)
vertical = self.grid.check_vertical_group_at(bot_id, x, y)
diagonal_forward = self.grid.check_forward_diagonal_group_at(bot_id, x, y)
diagonal_backward = self.grid.check_backward_diagonal_group_at(bot_id, x, y)
max_group = max(horizontal, vertical, diagonal_forward, diagonal_backward)
if max_group >= self.win_condition:
return True
return False
def play(self):
if self.is_initialized:
print("GAME ALREADY INITIALIZED, create a new tournament.")
return
self.is_initialized = True
while not self.did_stop:
if self.is_paused:
sleep(self.time_between_rounds)
continue
current_bot = self.current_turns_bot()
placed_location = self.play_turn()
self.on_turn_end(current_bot)
# Check for winner
if placed_location is None:
continue
if self.check_if_bot_won_at(current_bot.id, placed_location[0], placed_location[1]):
self.on_winner_found(current_bot)
break
sleep(self.time_between_rounds)
self.did_stop = True
def on_turn_end(self, bot_played: BaseBot):
for c in self.on_turn_end_cb:
c(bot_played)
def on_winner_found(self, winner_bot: BaseBot):
for c in self.on_winner_found_cb:
c(winner_bot)
``` |
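A hypothetical wiring sketch for the two callback lists; `RandomBot` is an assumed `BaseBot` subclass taking an id, not something defined in this file:
```python
# RandomBot is an assumed BaseBot subclass; purely illustrative.
def announce_turn(bot):
    print(f"bot {bot.id} just played")

def announce_winner(bot):
    print(f"bot {bot.id} wins!")

master = TournamentMaster(RandomBot(1), RandomBot(2), grid_width=7, grid_height=6)
master.on_turn_end_cb.append(announce_turn)
master.on_winner_found_cb.append(announce_winner)
master.play()
```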
{
"source": "JorgePasco1/among-us-discord-bot",
"score": 3
} |
#### File: app/services/messaging_service.py
```python
from discord import Message
from app.helpers import generate_random_config
async def say_hello(received_message: Message):
salutation = "Hello! I am a bot. Type !help to see the available commands"
await received_message.channel.send(salutation)
async def get_help(received_message: Message):
description_lines = ''
for command in possible_commands.keys():
description_lines = description_lines + \
f"\t**!{command}**: {possible_commands[command]['description']}" + "\n"
response = f"""The available commands are:
{description_lines}
"""
await received_message.channel.send(response)
async def send_random_config(received_message: Message):
config_string = generate_random_config()
try:
sent_message = await received_message.channel.send(config_string)
await sent_message.add_reaction('🔁')
print("Success sending random config")
except Exception as e:
print(str(e))
possible_commands = {
'hello': {
'description': "Get a salutation from the bot.",
'execute': say_hello
},
'help': {
'description': "Get the available commands with a short description",
'execute': get_help
},
'random_settings': {
'description': "Get a random set of settings for your game.",
'execute': send_random_config
}
}
``` |
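A sketch of how a discord.py `on_message` handler could dispatch into `possible_commands`; the `!` prefix and the handler name are assumptions, not part of this file:
```python
# Sketch only: assumes commands arrive as messages like "!help".
async def handle_message(message):
    if not message.content.startswith("!"):
        return
    parts = message.content[1:].split()
    if not parts:
        return
    entry = possible_commands.get(parts[0])
    if entry is not None:
        await entry["execute"](message)
```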
{
"source": "JorgePe/InterfaceA",
"score": 3
} |
#### File: JorgePe/InterfaceA/ftdi01.py
```python
from pylibftdi import BitBangDevice
import contextlib
FTDI_SN = 'A105BPBO' # Serial Number of the FTDI device, see dmesg
DIRECTION = 0x3F # six LSB are output(1), two MSB are input(0)
# Bit masks for each pin
OUT0 = 0x01 # TXD
OUT1 = 0x02 # RXD
OUT2 = 0x04 # RTS#
OUT3 = 0x08 # CTS#
OUT4 = 0x10 # DTR#
OUT5 = 0x20 # DSR#
INP6 = 0x40 # DCD#
INP7 = 0x80 # RI#
class CursesKeyboard(object):
def __init__(self):
import curses
self.key = curses.initscr()
curses.cbreak()
curses.noecho()
self.key.keypad(1)
self.key.nodelay(1)
def read(self):
return self.key.getch()
def read_code(self):
c = self.key.getch()
if c > -1:
return chr(c)
return ""
def close(self):
import curses
curses.nocbreak()
self.key.keypad(0)
curses.echo()
curses.endwin()
def __del__(self):
try:
self.close()
except:
pass
def zero_all():
# resets all output pins
bb.port = 0x00
def toggle(b):
# b is the index of the bit to toggle (0 to 5)
if b == 0:
if (bb.port & OUT0) == OUT0:
bb.port &= (0xFF-OUT0) # clear
else:
bb.port |= OUT0 # set
elif b == 1:
if (bb.port & OUT1) == OUT1:
bb.port &= (0xFF-OUT1) # clear
else:
bb.port |= OUT1 # set
elif b == 2:
if (bb.port & OUT2) == OUT2:
bb.port &= (0xFF-OUT2) # clear
else:
bb.port |= OUT2 # set
elif b == 3:
if (bb.port & OUT3) == OUT3:
bb.port &= (0xFF-OUT3) # clear
else:
bb.port |= OUT3 # set
elif b == 4:
if (bb.port & OUT4) == OUT4:
bb.port &= (0xFF-OUT4) # clear
else:
bb.port |= OUT4 # set
elif b == 5:
if (bb.port & OUT5) == OUT5:
bb.port &= (0xFF-OUT5) # clear
else:
bb.port |= OUT5 # set
def bridge(pair, command):
# possible pairs: A = OUT 0+1, B=2+3, C=4+5
# possible commands: L(eft), R(ight), B(reak), C(oast)
if pair == 'A':
if command == 'L':
# set 0 and reset 1
bb.port |= OUT0
bb.port &= (0xFF - OUT1)
elif command == 'R':
# reset 0 and set 1
bb.port &= (0xFF - OUT0)
bb.port |= OUT1
elif command == 'B':
# set 0 and 1
bb.port |= OUT0
bb.port |= OUT1
elif command == 'C':
# reset 0 and 1
bb.port &= (0xFF - OUT0)
bb.port &= (0xFF - OUT1)
elif pair == 'B':
if command == 'L':
# set 2 and reset 3
bb.port |= OUT2
bb.port &= (0xFF - OUT3)
elif command == 'R':
# reset 2 and set 3
bb.port &= (0xFF - OUT2)
bb.port |= OUT3
elif command == 'B':
# set 2 and 3
bb.port |= OUT2
bb.port |= OUT3
elif command == 'C':
# reset 2 and 3
bb.port &= (0xFF - OUT2)
bb.port &= (0xFF - OUT3)
elif pair == 'C':
if command == 'L':
# set 4 and reset 5
bb.port |= OUT4
bb.port &= (0xFF - OUT5)
elif command == 'R':
# reset 4 and set 5
bb.port &= (0xFF - OUT4)
bb.port |= OUT5
elif command == 'B':
# set 4 and 5
bb.port |= OUT4
bb.port |= OUT5
elif command == 'C':
# reset 4 and 5
bb.port &= (0xFF - OUT4)
bb.port &= (0xFF - OUT5)
@contextlib.contextmanager
def keyboard_context():
keyboard = CursesKeyboard()
yield keyboard
keyboard.close()
zero_all()
# Main
# auto_detach = False is needed as a workaround to prevent
# segmentation faults when accessing the FTDI device
# see pylibftdi issue #25
bb = BitBangDevice(FTDI_SN, auto_detach=False)
bb.direction = DIRECTION
zero_all()
counter6 = 0
counter7 = 0
print("Use keys '1', '2', '3', '4', '5', '6' or [ASZX], [DFCV], [GHBN] or '0' or 'Q/ESC'")
with keyboard_context() as keys:
while True:
k = keys.read()
# Toggle Commands for individual outputs
if k == ord('1'):
toggle(0)
elif k == ord('2'):
toggle(1)
elif k == ord('3'):
toggle(2)
elif k == ord('4'):
toggle(3)
elif k == ord('5'):
toggle(4)
elif k == ord('6'):
toggle(5)
# Bridge Commands for pairs of outputs
# pair A
elif k == ord('A') or k == ord('a'):
bridge('A','L')
elif k == ord('S') or k == ord('s'):
bridge('A','R')
elif k == ord('Z') or k == ord('z'):
bridge('A','C')
elif k == ord('X') or k == ord('x'):
bridge('A','B')
# pair B
elif k == ord('D') or k == ord('d'):
bridge('B','L')
elif k == ord('F') or k == ord('f'):
bridge('B','R')
elif k == ord('C') or k == ord('c'):
bridge('B','C')
elif k == ord('V') or k == ord('v'):
bridge('B','B')
# pair C
elif k == ord('G') or k == ord('g'):
bridge('C','L')
elif k == ord('H') or k == ord('h'):
bridge('C','R')
elif k == ord('B') or k == ord('b'):
bridge('C','C')
elif k == ord('N') or k == ord('n'):
bridge('C','B')
# reset counters
elif k == ord('0'):
counter6 = 0
counter7 = 0
# Quit with q/Q/ESC
# Try not to use Ctrl+C, terminal could became 'strange'
elif k == ord('Q') or k == ord('q') or k == 27:
break
# Poll inputs
read = bb.port
# print('IN7:', '1' if (read & INP7) == INP7 else '0',
# ' IN6:', '1' if (read & INP6) == INP6 else '0', '\r', end='')
if read & INP6 == INP6:
inp6 = '1'
else:
counter6 += 1
inp6 = '0'
if read & INP7 == INP7:
inp7 = '1'
else:
counter7 += 1
inp7 = '0'
print('IN7: ', inp7, 'IN6: ', inp6, ' Counters 7: ', counter7, '6: ', counter6, ' \r', end='')
```
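The six-branch if/elif ladder in `toggle()` can be collapsed with a mask table; a behavior-preserving sketch using the names defined above (`bb` is the `BitBangDevice`):
```python
OUT_MASKS = (OUT0, OUT1, OUT2, OUT3, OUT4, OUT5)

def toggle_compact(b):
    # XOR flips exactly the selected output bit; same effect as toggle(b).
    bb.port ^= OUT_MASKS[b]

def set_compact(b):
    bb.port |= OUT_MASKS[b]

def reset_compact(b):
    bb.port &= 0xFF - OUT_MASKS[b]
```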
#### File: JorgePe/InterfaceA/turtle.py
```python
from pylibftdi import BitBangDevice
import contextlib
from time import sleep
FTDI_SN = 'A105BPBO' # Serial Number of the FTDI device, see dmesg
DIRECTION = 0x3F # six LSB are output(1), two MSB are input(0)
# Bit masks for each pin
OUT0 = 0x01 # TXD
OUT1 = 0x02 # RXD
OUT2 = 0x04 # RTS#
OUT3 = 0x08 # CTS#
OUT4 = 0x10 # DTR#
OUT5 = 0x20 # DSR#
INP6 = 0x40 # DCD#
INP7 = 0x80 # RI#
#COMMAND_DELAY = 0.0025
COMMAND_DELAY = 0.005
LOOP_DELAY = 0.1
def zero_all():
# resets all output pins
bb.port = 0x00
#
# set()
# puts output [0..5] at high level
#
def set(b):
if b == 0:
bb.port |= OUT0
elif b == 1:
bb.port |= OUT1
elif b == 2:
bb.port |= OUT2
elif b == 3:
bb.port |= OUT3
elif b == 4:
bb.port |= OUT4
elif b == 5:
bb.port |= OUT5
#
# reset()
# puts output [0..5] at low level
#
def reset(b):
if b == 0:
bb.port &= (0xFF - OUT0)
elif b == 1:
bb.port &= (0xFF - OUT1)
elif b == 2:
bb.port &= (0xFF - OUT2)
elif b == 3:
bb.port &= (0xFF - OUT3)
elif b == 4:
bb.port &= (0xFF - OUT4)
elif b == 5:
bb.port &= (0xFF - OUT5)
#
# toggle()
# changes output [0..5] state
#
def toggle(b):
# b is the index of the bit to toggle (0 to 5)
if b == 0:
if (bb.port & OUT0) == OUT0:
bb.port &= (0xFF-OUT0) # clear
else:
bb.port |= OUT0 # set
elif b == 1:
if (bb.port & OUT1) == OUT1:
bb.port &= (0xFF-OUT1) # clear
else:
bb.port |= OUT1 # set
elif b == 2:
if (bb.port & OUT2) == OUT2:
bb.port &= (0xFF-OUT2) # clear
else:
bb.port |= OUT2 # set
elif b == 3:
if (bb.port & OUT3) == OUT3:
bb.port &= (0xFF-OUT3) # clear
else:
bb.port |= OUT3 # set
elif b == 4:
if (bb.port & OUT4) == OUT4:
bb.port &= (0xFF-OUT4) # clear
else:
bb.port |= OUT4 # set
elif b == 5:
if (bb.port & OUT5) == OUT5:
bb.port &= (0xFF-OUT5) # clear
else:
bb.port |= OUT5 # set
#
# bridge()
# i.e. a pair of ports
# possible pairs: A = OUT 0+1, B=2+3, C=4+5
# possible commands: L(eft), R(ight), B(reak), C(oast)
# not sure if Break acts different of Coast
#
def bridge(pair, command):
if pair == 'A':
if command == 'L':
# set 0 and reset 1
set(0)
reset(1)
elif command == 'R':
# reset 0 and set 1
reset(0)
set(1)
elif command == 'B':
# set 0 and 1
set(0)
set(1)
elif command == 'C':
# reset 0 and 1
reset(0)
reset(1)
elif pair == 'B':
if command == 'L':
# set 2 and reset 3
set(2)
reset(3)
elif command == 'R':
# reset 2 and set 3
reset(2)
set(3)
elif command == 'B':
# set 2 and 3
set(2)
set(3)
elif command == 'C':
# reset 2 and 3
reset(2)
reset(3)
elif pair == 'C':
if command == 'L':
# set 4 and reset 5
set(4)
reset(5)
elif command == 'R':
# reset 4 and set 5
reset(4)
set(5)
elif command == 'B':
# set 4 and 5
set(4)
set(5)
elif command == 'C':
# reset 4 and 5
reset(4)
reset(5)
#
# turtle()
# a turtle has 2 motors (bridge A and B)
# and 1 optosensor at Input 6
#
def turtle_front():
bridge('A', 'L')
bridge('B', 'L')
sleep(COMMAND_DELAY)
bridge('A', 'C')
bridge('B', 'C')
def turtle_back():
bridge('A', 'R')
bridge('B', 'R')
sleep(COMMAND_DELAY)
bridge('A', 'C')
bridge('B', 'C')
def turtle_left():
bridge('A', 'L')
bridge('B', 'R')
sleep(COMMAND_DELAY)
bridge('A', 'C')
bridge('B', 'C')
def turtle_right():
bridge('A', 'R')
bridge('B', 'L')
sleep(COMMAND_DELAY)
bridge('A', 'C')
bridge('B', 'C')
# Main
# auto_detach = False is needed as a workaround to prevent
# segmentation faults when accessing the FTDI device
# see pylibftdi issue #25
bb = BitBangDevice(FTDI_SN, auto_detach=False)
bb.direction = DIRECTION
zero_all()
#
# optosensor only detects black to white transition
# loop left-front until inp6 off then right a bit
#
while True:
# Poll inputs
read = bb.port
# print('IN7:', '1' if (read & INP7) == INP7 else '0',
# ' IN6:', '1' if (read & INP6) == INP6 else '0', '\r', end='')
if read & INP6 == INP6:
inp6 = '1'
else:
inp6 = '0'
if read & INP7 == INP7:
inp7 = '1'
else:
inp7 = '0'
if inp6 == '0':
# transition B-W or just over B
# right a bit
turtle_right()
turtle_right()
turtle_right()
turtle_right()
turtle_right()
turtle_right()
else:
# white
# front and left a bit
turtle_front()
turtle_left()
sleep(LOOP_DELAY)
``` |
{
"source": "JorgePe/ir-spybotics",
"score": 2
} |
#### File: JorgePe/ir-spybotics/ir_generate_lirc.py
```python
BIT_LENGTH = 208 # 1 bit = 208 us long, based on 4800 bit/s
PAUSE = 208 # arbitrary pause of 1 bit between each byte, seems OK
CHANNEL = {'Ch1':0x05, 'Ch2':0x06, 'Ch3':0x07, 'All':0x04}
CMD_ORANGE = {'OrangeNop':0x00, 'OrangeFwd':0x07, 'OrangeRev':0x0F, 'OrangeStp':0x08}
CMD_YELLOW = {'YellowNop':0x00, 'YellowFwd':0x07, 'YellowRev':0x0F, 'YellowStp':0x08}
def reverse_bits(x, num_bits):
# renamed from `reversed` to avoid shadowing the Python builtin
answer = 0
for i in range( num_bits ): # for each bit number
if (x & (1 << i)): # if it matches that bit
answer |= (1 << (num_bits - 1 - i)) # set the "opposite" bit in answer
return answer
def parityOf(int_type):
parity = 0
while (int_type):
parity = ~parity
int_type = int_type & (int_type - 1)
return(parity)
def getsignal(channel, orange, yellow):
# calculate check digit
check = 0x10 - ((channel + orange + yellow) & 0x0F)
# convert to binary
byte1 = channel * 16 + orange
byte2 = yellow * 16 + check
# revert bytes
revbyte1 = reverse_bits(byte1, 8)
revbyte2 = reverse_bits(byte2, 8)
# calculate odd parity
if parityOf(revbyte1) == 0:
odd1 = '1'
else:
odd1 = '0'
if parityOf(revbyte2) == 0:
odd2 = '1'
else:
odd2 = '0'
# create message by adding start bit, byte, parity bit and stop bits (with a 1 bit extra in between)
message = '0' + "{0:08b}".format(revbyte1) + odd1 + '1'
message += '1'
message += '0' + "{0:08b}".format(revbyte2) + odd2 + '1'
lirc = hex(int(message,2))
return message, lirc
# Main
print('begin remote')
print(' name LEGO_Old_RC')
print(' bits 23')
print(' frequency 76000')
print(' one 1 208')
print(' zero 208 1')
print(' gap 208')
print()
print(' begin codes')
for chn in CHANNEL:
for cmd_orange in CMD_ORANGE:
for cmd_yellow in CMD_YELLOW:
msg, lirc = getsignal(CHANNEL[chn], CMD_ORANGE[cmd_orange], CMD_YELLOW[cmd_yellow])
print(' ',chn+'_'+cmd_orange+'_'+cmd_yellow,' ', lirc)
print(' end codes')
print('end remote')
``` |
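Quick sanity checks for the bit helpers above (pure Python, runnable once the functions are defined):
```python
assert reverse_bits(0b00000001, 8) == 0b10000000
assert reverse_bits(0xF0, 8) == 0x0F
assert parityOf(0b0111) != 0  # three set bits -> odd parity (nonzero)
assert parityOf(0b0011) == 0  # two set bits -> even parity
```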
{
"source": "JorgePe/multicastMIDI-EV3",
"score": 2
} |
#### File: JorgePe/multicastMIDI-EV3/ev3midi.py
```python
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import TouchSensor
from pybricks.parameters import Port
from pybricks.tools import wait
import os
# before start:
# - start multimidicast [&]
# - ensure midipipe exists - mkfifo midipipe
# - start amidicat - ./amidicat --port 128:0 --hex < ./midipipe
import midi_notes
# check if multimidicast is running and start it if not
if os.popen('pgrep multimidicast').read().strip() == '':
os.system('./multimidicast &')
print('multimidicast started')
else:
print('multimidicast was running')
wait(500)
# check if midipipe was created
if os.popen('ls midipipe').read().strip() == 'midipipe':
print('midipipe exists')
else:
os.system('mkfifo midipipe')
print('midipipe created')
# check if amidicat is running and start it if not
print(os.popen('pgrep amidicat').read().strip())
if os.popen('pgrep amidicat').read().strip() == '':
os.system('./amidicat --port 128:0 --hex < ./midipipe &')
print('amidicat started')
else:
print('amidicat was running')
print(os.popen('pgrep amidicat').read().strip())
ev3 = EV3Brick()
ts1 = TouchSensor(Port.S1)
ts2 = TouchSensor(Port.S2)
ts3 = TouchSensor(Port.S3)
ts4 = TouchSensor(Port.S4)
# notes associated to each sensor
my_notes = [midi_notes.C4, midi_notes.D4, midi_notes.E4, midi_notes.F4]
ALL_NOTES_OFF = "B0 7B 00"
def send_note_on(note):
if note in my_notes:
pipe.write("90 " + note + " 7F")
else:
# mistake ?
pipe.write(ALL_NOTES_OFF)
def send_note_off(note):
if note in my_notes:
pipe.write("80 " + note + " 00")
else:
# mistake ?
pipe.write(ALL_NOTES_OFF)
key_1_on = False
key_2_on = False
key_3_on = False
key_4_on = False
pipe = open("./midipipe", "w")
send_note_off('ALL')
while True :
if ts1.pressed() == True :
if key_1_on == False :
send_note_on(my_notes[0])
key_1_on = True
else:
if key_1_on == True :
send_note_off(my_notes[0])
key_1_on = False
if ts2.pressed() == True :
if key_2_on == False :
send_note_on(my_notes[1])
key_2_on = True
else:
if key_2_on == True :
send_note_off(my_notes[1])
key_2_on = False
if ts3.pressed() == True :
if key_3_on == False :
send_note_on(my_notes[2])
key_3_on = True
else:
if key_3_on == True :
send_note_off(my_notes[2])
key_3_on = False
if ts4.pressed() == True :
if key_4_on == False :
send_note_on(my_notes[3])
key_4_on = True
else:
if key_4_on == True :
send_note_off(my_notes[3])
key_4_on = False
#it never gets here but it is important to do this on exit
send_note_off('ALL')
pipe.close()
``` |
{
"source": "JorgePe/pybricks-api",
"score": 2
} |
#### File: doc/common/conf.py
```python
import os
import re
import sys
from docutils import nodes
from docutils.parsers.rst.directives import flag
from docutils.parsers.rst import Directive
from sphinx.application import Sphinx
from sphinx.domains.python import PyClassmember, PythonDomain
import toml
TOP_DIR = os.path.abspath(os.path.join('..', '..'))
sys.path.insert(0, TOP_DIR)
sys.path.append(os.path.abspath('../common/extensions'))
from pybricks.hubs import EV3Brick # noqa E402
from pybricks.media.ev3dev import Image # noqa E402
from pybricks._common import Speaker # noqa E402
# ON_RTD is whether we are on readthedocs.org
# this line of code grabbed from docs.readthedocs.org
ON_RTD = os.environ.get('READTHEDOCS', None) == 'True'
_pyproject = toml.load(os.path.join(TOP_DIR, 'pyproject.toml'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
'color',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['../common/_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = "v" + _pyproject["tool"]["poetry"]["version"]
# The short X.Y version.
version = re.match(r'(v\d+\.\d+)', release)[0]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# Figure numbering
numfig = True
numfig_format = {
'figure': 'Figure %s',
'table': 'Table %s',
'code-block': 'Listing %s',
'section': 'Section %s'
}
# Find cross-reference errors
nitpicky = True
# https://stackoverflow.com/a/30624034/1976323
nitpick_ignore = [
('py:class', 'bool'),
('py:class', 'bytearray'),
('py:class', 'bytes'),
('py:class', 'callable'),
('py:class', 'dict'),
('py:class', 'float'),
('py:class', 'int'),
('py:class', 'iter'),
('py:class', 'list'),
('py:class', 'object'),
('py:class', 'str'),
('py:class', 'tuple'),
('py:exc', 'OSError'),
('py:exc', 'RuntimeError'),
('py:exc', 'TypeError'),
('py:exc', 'ValueError'),
]
# -- Autodoc options ------------------------------------------------------
autodoc_member_order = 'bysource'
autodoc_default_options = {
'members': True,
'undoc-members': True,
}
autoclass_content = 'both' # This ensures init arguments are not ignored
add_module_names = False # Hide module name
# -- Options for HTML output ----------------------------------------------
if ON_RTD:
html_theme = 'default'
else:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# a little extra javascript
html_js_files = [
'js/custom.js'
]
html_context = {
'disclaimer': _DISCLAIMER,
}
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'style_external_links': True,
'logo_only': True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['../common/_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# Don't hyperlink to larger images for scaled images.
html_scaled_image_link = False
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Pybricksdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
'preamble': r'''
\usepackage{CJKutf8}
\makeatletter
\fancypagestyle{normal}{
\fancyhf{}
\fancyfoot[R]{{\py@HeaderFamily\thepage}}
\fancyfoot[C]{\raisebox{-7mm}{\tiny %(disclaimer)s}}
\fancyhead[L]{{\py@HeaderFamily \@title}}
\fancyhead[R]{{\py@HeaderFamily \py@release}}
\renewcommand{\headrulewidth}{0.4pt}
\renewcommand{\footrulewidth}{0.4pt}
}
\fancypagestyle{plain}{
\fancyhf{}
\fancyfoot[R]{{\py@HeaderFamily\thepage}}
\fancyfoot[C]{\raisebox{-7mm}{\tiny %(disclaimer)s}}
\renewcommand{\headrulewidth}{0.4pt}
\renewcommand{\footrulewidth}{0.4pt}
}
\makeatother
''' % {
'disclaimer': ' '.join((_DISCLAIMER, '©', copyright)),
},
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
'extraclassoptions': 'openany,oneside',
'releasename': 'Version',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, ''.join([project, '-v', version, '.tex']), _TITLE,
author, 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pybricks', 'Pybricks Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Pybricks', 'Pybricks Documentation',
author, 'Pybricks', 'One line description of project.',
'Miscellaneous'),
]
# -- .. availability:: directive
class AvailabilityDirective(Directive):
has_content = True
option_spec = {
'movehub': flag,
'cityhub': flag,
'technichub': flag,
'ev3dev-stretch': flag,
}
def run(self):
if not self.options:
raise self.error('Must specify at least one platform.')
# TODO: make links to platform pages
return [nodes.emphasis(text='Availability: '),
nodes.Text(', '.join(self.options))]
def setup(app: Sphinx):
app.add_directive('availability', AvailabilityDirective)
# -- Python domain hacks ---------------------------------------------------
real_get_signature_prefix = PyClassmember.get_signature_prefix
def get_signature_prefix(self, sig):
# hacks for battery and light
if sig.count('.') >= 2:
return ''
return real_get_signature_prefix(self, sig)
PyClassmember.get_signature_prefix = get_signature_prefix
# HACK: For certain hub attributes, we list the class members of the attributes
# class as if the attribute was a nested class so that readers don't have to
# skip around the docs as much. To make this work, we replace the attribute
# values with the type and override PythonDomain.find_obj so that references
# still work.
base_find_obj = PythonDomain.find_obj
def find_obj(self, env, modname, classname, name, type, searchmode=0):
if modname == 'pybricks.hubs':
if classname == 'screen':
if name.startswith('Font.') or name == 'Font':
modname = 'pybricks.media.ev3dev'
elif name.startswith('Image.') or name == 'Image':
modname = 'pybricks.media.ev3dev'
else:
classname = 'EV3Brick.screen'
elif classname == 'speaker':
classname = 'EV3Brick.speaker'
return base_find_obj(self, env, modname, classname, name, type, searchmode)
PythonDomain.find_obj = find_obj
EV3Brick.screen = Image
EV3Brick.speaker = Speaker
``` |
{
"source": "JorgePe/randomideas",
"score": 3
} |
#### File: JorgePe/randomideas/blubbl3-01.py
```python
from ev3dev2.motor import MediumMotor, OUTPUT_A, OUTPUT_B, OUTPUT_C, SpeedPercent, DcMotor
from ev3dev2.port import LegoPort
from ev3dev2.sensor import INPUT_1, INPUT_2
from ev3dev2.sensor.lego import TouchSensor
#from ev3dev2.led import Leds
from threading import Thread
from time import sleep
running = False
bubble_handle_thread = False
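# 'running' toggles bubble production from the touch sensor;
# 'bubble_handle_thread' keeps the worker loop alive so the thread could be
# shut down cleanly by setting the flag to False.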
def bubble_handle():
while bubble_handle_thread == True:
# every step is tested so we can exit faster
if running == True:
m3.on_to_position(SpeedPercent(30),-90)
if running == True:
m3.wait_until_not_moving()
if running == True:
sleep(0.25)
if running == True:
m3.on_to_position(SpeedPercent(30),-6)
if running == True:
m3.wait_until_not_moving()
if running == True:
sleep(1.9)
# initialize motor on Port A for 'dc-motor' mode (am using a Power Functions L motor through an adapter cable)
pA = LegoPort(OUTPUT_A)
pA.mode = 'dc-motor'
sleep(0.6) # looks like it needs time for new mode to settle
m1 = DcMotor(OUTPUT_A) # Airjitsu Propeller
m1.duty_cycle_sp = 100
#m2 = DcMotor(OUTPUT_B) # Peristaltic Pump
#m2.duty_cycle_sp=100
m3 = MediumMotor(OUTPUT_C) # Bubble Handle
m3.position = 0
ts1 = TouchSensor(INPUT_1) # Bubble Production
#ts2 = TouchSensor(INPUT_2) # Liquid refill
t = Thread(target=bubble_handle)
bubble_handle_thread = True
t.start()
while True:
if ts1.is_pressed:
running = not running
sleep(0.25)
if running == True:
m1.run_forever()
else:
m1.stop()
# needs to return bubble handle to rest position
sleep(1.7)
m3.wait_until_not_moving()
m3.on_to_position(SpeedPercent(30),0)
# will never reach this
```
#### File: MIDI/DrumKit/ipmididrum.py
```python
from pybricks.hubs import EV3Brick
from pybricks.ev3devices import ColorSensor
#from pybricks.iodevices import Ev3devSensor
from pybricks.parameters import Port
from pybricks.tools import wait
#from math import log10
import os
# before start:
# - ensure midipipe exists - mkfifo midipipe
# - two choices:
# + use USB MIDI adapter at 20:0
# - start amidicat - ./amidicat --port 20:0 --hex < ./midipipe
# + use ipMIDI
# - start multimidicast [&]
# - start amidicat - ./amidicat --port 128:0 --hex < ./midipipe
import midi_notes
# check if midipipe was created
if os.popen('ls midipipe').read().strip() == 'midipipe':
print('midipipe exists')
else:
os.system('mkfifo midipipe')
print('midipipe created')
wait(500)
# check if multimidicast is running and start it if not
if os.popen('pgrep multimidicast').read().strip() == '':
os.system('./multimidicast -i wlxd03745176d00 -q &')
print('multimidicast started')
else:
    print('multimidicast was running')
wait(500)
# check if amidicat is running and start it if not
print(os.popen('pgrep amidicat').read().strip())
if os.popen('pgrep amidicat').read().strip() == '':
os.system('./amidicat --port 128:0 --hex < ./midipipe &')
print('amidicat started')
else:
print('amidicat was running')
wait(500)
ev3 = EV3Brick()
pad1 = ColorSensor(Port.S1)
pad2 = ColorSensor(Port.S2)
pad3 = ColorSensor(Port.S3)
pad4 = ColorSensor(Port.S4)
#DRUM = "08" # need to see other instruments but some don't stop note
#DRUM = "10"
#DRUM_PATCH = "08" # Room kit ? 9
DRUM_PATCH = "10" # Power kit ? 17
# MIDI channel 10 (the General MIDI percussion channel) appears as hex digit 9
# in the status bytes below, because channels are zero-indexed on the wire
ALL_NOTES_OFF = "B9 7B 00"
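# Anatomy of the hex MIDI messages written to the pipe (these status bytes are
# fixed by the MIDI spec, not by this script):
#   99 nn vv -> Note On, channel 10, note nn, velocity vv
#   89 nn 00 -> Note Off, channel 10
#   C9 pp    -> Program Change, channel 10 (selects drum kit pp)
#   B9 7B 00 -> Control Change 123 (0x7B) = All Notes Off, channel 10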
def send_note_on(note, velocity="64"):
pipe.write("99 " + note + " " + velocity)
# print('Note ON: ', note, velocity)
def send_note_off(note):
pipe.write("89 " + note + " 00")
# print('Note OFF: ', note)
def mute():
pipe.write(ALL_NOTES_OFF)
# print('Mute')
def select_instrument(instr):
pipe.write("C9 " + instr)
# print('Instrument: ', instr)
def calc_velocity(pressure, reference):
# convert pressure to a velocity value
    # a string with the hexadecimal representation
# this should be logarithmic instead of linear
# velocity = int(VELOCITY_FACTOR * pressure) + VELOCITY_THRESHOLD
# velocity = int( log10(pressure) * 54 )
velocity = (pressure - reference) * 2
    # clamp velocity to the valid MIDI data range 1..127 (a non-positive value
    # would otherwise produce a garbage hex string)
    if velocity > 127:
        velocity = 127
    if velocity < 1:
        velocity = 1
    # format as a two-character hexadecimal string
    vel = "{:02x}".format(velocity)
return vel
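# Worked example for the linear mapping above: a pad reading of pressure=80
# against reference=36 gives velocity (80 - 36) * 2 = 88 = 0x58, so
# calc_velocity(80, 36) returns "58".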
pipe = open("./midipipe", "w")
wait(100)
mute()
select_instrument(DRUM_PATCH)
wait(1000)
# instrument 47h: the notes all seem identical to BASS DRUM
HIGHQ = "1B"
SLAP = "1C"
BASS_DRUM_1 = "24" # baaaaad - maybe velocity or duration
BASS_DRUM_2 = "23" # baad
SNARE_DRUM = "26" #
HIGH_TOM = "32" #
MID_TOM = "2F" #
LOW_TOM = "2B" # ?
OPEN_HIHAT = "2E" #?
FOOT_HIHAT = "2C"
CLOSED_HIHAT = "2A"
RIDE_CYMBAL = "33"
CRASH_CYMBAL = "31" # GM Crash Cymbal 1 (note 49); 2A is the Closed Hi-Hat
HAND_CLAP = "27"
mute()
note1 = BASS_DRUM_1
note2 = HAND_CLAP
note3 = SNARE_DRUM
note4 = RIDE_CYMBAL
while True:
p1 = pad1.reflection()
p2 = pad2.reflection()
p3 = pad3.reflection()
p4 = pad4.reflection()
# print(p1, p2, p3, p4)
# 63 56 46 71
if p1 > 65:
print("#1", p1)
# send_note_on(note1, calc_velocity(p1, 36))
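        # fixed two-step hit: a soft strike (velocity 0x31) followed 1 ms
        # later by a hard strike (velocity 0x6D)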
send_note_on(note1, "31")
wait(1)
send_note_on(note1, "6D")
if p2 > 58:
print("#2", p2)
send_note_on(note2, calc_velocity(p2, 29))
if p3 > 49:
print("#3", p3)
send_note_on(note3, calc_velocity(p3, 19))
if p4 > 76:
print("#4", p4)
send_note_on(note4, calc_velocity(p4, 34))
# it never gets here, but it is important to do this on exit
mute()
pipe.close()
``` |
{
"source": "jorgeperezc/pulumi-grafana",
"score": 2
} |
#### File: python/pulumi_grafana/alert_notification.py
```python
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AlertNotificationArgs', 'AlertNotification']
@pulumi.input_type
class AlertNotificationArgs:
def __init__(__self__, *,
type: pulumi.Input[str],
disable_resolve_message: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
secure_settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
send_reminder: Optional[pulumi.Input[bool]] = None,
settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a AlertNotification resource.
:param pulumi.Input[str] type: The type of the alert notification channel.
:param pulumi.Input[bool] disable_resolve_message: Whether to disable sending resolve messages.
:param pulumi.Input[str] frequency: Frequency of alert reminders. Frequency must be set if reminders are enabled.
        :param pulumi.Input[bool] is_default: Whether this is the default channel for all your alerts.
        :param pulumi.Input[str] name: The name of the alert notification channel.
        :param pulumi.Input[Mapping[str, Any]] secure_settings: Additional secure settings; for a full reference, see the [Grafana Supported Settings
               documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
        :param pulumi.Input[bool] send_reminder: Whether to send reminders for triggered alerts.
        :param pulumi.Input[Mapping[str, Any]] settings: Additional settings; for a full reference, see the [Grafana HTTP API
               documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
:param pulumi.Input[str] uid: Unique identifier. If unset, this will be automatically generated.
"""
pulumi.set(__self__, "type", type)
if disable_resolve_message is not None:
pulumi.set(__self__, "disable_resolve_message", disable_resolve_message)
if frequency is not None:
pulumi.set(__self__, "frequency", frequency)
if is_default is not None:
pulumi.set(__self__, "is_default", is_default)
if name is not None:
pulumi.set(__self__, "name", name)
if secure_settings is not None:
pulumi.set(__self__, "secure_settings", secure_settings)
if send_reminder is not None:
pulumi.set(__self__, "send_reminder", send_reminder)
if settings is not None:
pulumi.set(__self__, "settings", settings)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
The type of the alert notification channel.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="disableResolveMessage")
def disable_resolve_message(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to disable sending resolve messages.
"""
return pulumi.get(self, "disable_resolve_message")
@disable_resolve_message.setter
def disable_resolve_message(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_resolve_message", value)
@property
@pulumi.getter
def frequency(self) -> Optional[pulumi.Input[str]]:
"""
Frequency of alert reminders. Frequency must be set if reminders are enabled.
"""
return pulumi.get(self, "frequency")
@frequency.setter
def frequency(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "frequency", value)
@property
@pulumi.getter(name="isDefault")
def is_default(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether this is the default channel for all your alerts.
"""
return pulumi.get(self, "is_default")
@is_default.setter
def is_default(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_default", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the alert notification channel.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="secureSettings")
def secure_settings(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
        Additional secure settings; for a full reference, see the [Grafana Supported Settings
        documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
"""
return pulumi.get(self, "secure_settings")
@secure_settings.setter
def secure_settings(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "secure_settings", value)
@property
@pulumi.getter(name="sendReminder")
def send_reminder(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to send reminders for triggered alerts.
"""
return pulumi.get(self, "send_reminder")
@send_reminder.setter
def send_reminder(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "send_reminder", value)
@property
@pulumi.getter
def settings(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
        Additional settings; for a full reference, see the [Grafana HTTP API
        documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
"""
return pulumi.get(self, "settings")
@settings.setter
def settings(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "settings", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier. If unset, this will be automatically generated.
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
@pulumi.input_type
class _AlertNotificationState:
def __init__(__self__, *,
disable_resolve_message: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
secure_settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
send_reminder: Optional[pulumi.Input[bool]] = None,
settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
type: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering AlertNotification resources.
:param pulumi.Input[bool] disable_resolve_message: Whether to disable sending resolve messages.
:param pulumi.Input[str] frequency: Frequency of alert reminders. Frequency must be set if reminders are enabled.
        :param pulumi.Input[bool] is_default: Whether this is the default channel for all your alerts.
        :param pulumi.Input[str] name: The name of the alert notification channel.
        :param pulumi.Input[Mapping[str, Any]] secure_settings: Additional secure settings; for a full reference, see the [Grafana Supported Settings
               documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
        :param pulumi.Input[bool] send_reminder: Whether to send reminders for triggered alerts.
        :param pulumi.Input[Mapping[str, Any]] settings: Additional settings; for a full reference, see the [Grafana HTTP API
               documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
:param pulumi.Input[str] type: The type of the alert notification channel.
:param pulumi.Input[str] uid: Unique identifier. If unset, this will be automatically generated.
"""
if disable_resolve_message is not None:
pulumi.set(__self__, "disable_resolve_message", disable_resolve_message)
if frequency is not None:
pulumi.set(__self__, "frequency", frequency)
if is_default is not None:
pulumi.set(__self__, "is_default", is_default)
if name is not None:
pulumi.set(__self__, "name", name)
if secure_settings is not None:
pulumi.set(__self__, "secure_settings", secure_settings)
if send_reminder is not None:
pulumi.set(__self__, "send_reminder", send_reminder)
if settings is not None:
pulumi.set(__self__, "settings", settings)
if type is not None:
pulumi.set(__self__, "type", type)
if uid is not None:
pulumi.set(__self__, "uid", uid)
@property
@pulumi.getter(name="disableResolveMessage")
def disable_resolve_message(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to disable sending resolve messages.
"""
return pulumi.get(self, "disable_resolve_message")
@disable_resolve_message.setter
def disable_resolve_message(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_resolve_message", value)
@property
@pulumi.getter
def frequency(self) -> Optional[pulumi.Input[str]]:
"""
Frequency of alert reminders. Frequency must be set if reminders are enabled.
"""
return pulumi.get(self, "frequency")
@frequency.setter
def frequency(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "frequency", value)
@property
@pulumi.getter(name="isDefault")
def is_default(self) -> Optional[pulumi.Input[bool]]:
"""
        Whether this is the default channel for all your alerts.
"""
return pulumi.get(self, "is_default")
@is_default.setter
def is_default(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_default", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the alert notification channel.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="secureSettings")
def secure_settings(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
        Additional secure settings; for a full reference, see the [Grafana Supported Settings
        documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
"""
return pulumi.get(self, "secure_settings")
@secure_settings.setter
def secure_settings(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "secure_settings", value)
@property
@pulumi.getter(name="sendReminder")
def send_reminder(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to send reminders for triggered alerts.
"""
return pulumi.get(self, "send_reminder")
@send_reminder.setter
def send_reminder(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "send_reminder", value)
@property
@pulumi.getter
def settings(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
        Additional settings; for a full reference, see the [Grafana HTTP API
        documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
"""
return pulumi.get(self, "settings")
@settings.setter
def settings(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "settings", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the alert notification channel.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def uid(self) -> Optional[pulumi.Input[str]]:
"""
Unique identifier. If unset, this will be automatically generated.
"""
return pulumi.get(self, "uid")
@uid.setter
def uid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uid", value)
class AlertNotification(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
disable_resolve_message: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
secure_settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
send_reminder: Optional[pulumi.Input[bool]] = None,
settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
type: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
        Create an AlertNotification resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] disable_resolve_message: Whether to disable sending resolve messages.
:param pulumi.Input[str] frequency: Frequency of alert reminders. Frequency must be set if reminders are enabled.
        :param pulumi.Input[bool] is_default: Whether this is the default channel for all your alerts.
        :param pulumi.Input[str] name: The name of the alert notification channel.
        :param pulumi.Input[Mapping[str, Any]] secure_settings: Additional secure settings; for a full reference, see the [Grafana Supported Settings
               documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
        :param pulumi.Input[bool] send_reminder: Whether to send reminders for triggered alerts.
        :param pulumi.Input[Mapping[str, Any]] settings: Additional settings; for a full reference, see the [Grafana HTTP API
               documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
:param pulumi.Input[str] type: The type of the alert notification channel.
:param pulumi.Input[str] uid: Unique identifier. If unset, this will be automatically generated.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AlertNotificationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        Create an AlertNotification resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param AlertNotificationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AlertNotificationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
disable_resolve_message: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
secure_settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
send_reminder: Optional[pulumi.Input[bool]] = None,
settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
type: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AlertNotificationArgs.__new__(AlertNotificationArgs)
__props__.__dict__["disable_resolve_message"] = disable_resolve_message
__props__.__dict__["frequency"] = frequency
__props__.__dict__["is_default"] = is_default
__props__.__dict__["name"] = name
__props__.__dict__["secure_settings"] = secure_settings
__props__.__dict__["send_reminder"] = send_reminder
__props__.__dict__["settings"] = settings
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
__props__.__dict__["uid"] = uid
super(AlertNotification, __self__).__init__(
'grafana:index/alertNotification:AlertNotification',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
disable_resolve_message: Optional[pulumi.Input[bool]] = None,
frequency: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
secure_settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
send_reminder: Optional[pulumi.Input[bool]] = None,
settings: Optional[pulumi.Input[Mapping[str, Any]]] = None,
type: Optional[pulumi.Input[str]] = None,
uid: Optional[pulumi.Input[str]] = None) -> 'AlertNotification':
"""
Get an existing AlertNotification resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] disable_resolve_message: Whether to disable sending resolve messages.
:param pulumi.Input[str] frequency: Frequency of alert reminders. Frequency must be set if reminders are enabled.
        :param pulumi.Input[bool] is_default: Whether this is the default channel for all your alerts.
        :param pulumi.Input[str] name: The name of the alert notification channel.
        :param pulumi.Input[Mapping[str, Any]] secure_settings: Additional secure settings; for a full reference, see the [Grafana Supported Settings
               documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
        :param pulumi.Input[bool] send_reminder: Whether to send reminders for triggered alerts.
        :param pulumi.Input[Mapping[str, Any]] settings: Additional settings; for a full reference, see the [Grafana HTTP API
               documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
:param pulumi.Input[str] type: The type of the alert notification channel.
:param pulumi.Input[str] uid: Unique identifier. If unset, this will be automatically generated.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AlertNotificationState.__new__(_AlertNotificationState)
__props__.__dict__["disable_resolve_message"] = disable_resolve_message
__props__.__dict__["frequency"] = frequency
__props__.__dict__["is_default"] = is_default
__props__.__dict__["name"] = name
__props__.__dict__["secure_settings"] = secure_settings
__props__.__dict__["send_reminder"] = send_reminder
__props__.__dict__["settings"] = settings
__props__.__dict__["type"] = type
__props__.__dict__["uid"] = uid
return AlertNotification(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="disableResolveMessage")
def disable_resolve_message(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to disable sending resolve messages.
"""
return pulumi.get(self, "disable_resolve_message")
@property
@pulumi.getter
def frequency(self) -> pulumi.Output[Optional[str]]:
"""
Frequency of alert reminders. Frequency must be set if reminders are enabled.
"""
return pulumi.get(self, "frequency")
@property
@pulumi.getter(name="isDefault")
def is_default(self) -> pulumi.Output[Optional[bool]]:
"""
        Whether this is the default channel for all your alerts.
"""
return pulumi.get(self, "is_default")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the alert notification channel.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="secureSettings")
def secure_settings(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
        Additional secure settings; for a full reference, see the [Grafana Supported Settings
        documentation](https://grafana.com/docs/grafana/latest/administration/provisioning/#supported-settings).
"""
return pulumi.get(self, "secure_settings")
@property
@pulumi.getter(name="sendReminder")
def send_reminder(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to send reminders for triggered alerts.
"""
return pulumi.get(self, "send_reminder")
@property
@pulumi.getter
def settings(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
        Additional settings; for a full reference, see the [Grafana HTTP API
        documentation](https://grafana.com/docs/grafana/latest/http_api/alerting_notification_channels/).
"""
return pulumi.get(self, "settings")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the alert notification channel.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def uid(self) -> pulumi.Output[str]:
"""
Unique identifier. If unset, this will be automatically generated.
"""
return pulumi.get(self, "uid")
```
#### File: python/pulumi_grafana/dashboard.py
```python
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['DashboardArgs', 'Dashboard']
@pulumi.input_type
class DashboardArgs:
def __init__(__self__, *,
config_json: pulumi.Input[str],
folder: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a Dashboard resource.
:param pulumi.Input[str] config_json: The complete dashboard model JSON.
:param pulumi.Input[int] folder: The id of the folder to save the dashboard in.
"""
pulumi.set(__self__, "config_json", config_json)
if folder is not None:
pulumi.set(__self__, "folder", folder)
@property
@pulumi.getter(name="configJson")
def config_json(self) -> pulumi.Input[str]:
"""
The complete dashboard model JSON.
"""
return pulumi.get(self, "config_json")
@config_json.setter
def config_json(self, value: pulumi.Input[str]):
pulumi.set(self, "config_json", value)
@property
@pulumi.getter
def folder(self) -> Optional[pulumi.Input[int]]:
"""
The id of the folder to save the dashboard in.
"""
return pulumi.get(self, "folder")
@folder.setter
def folder(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "folder", value)
@pulumi.input_type
class _DashboardState:
def __init__(__self__, *,
config_json: Optional[pulumi.Input[str]] = None,
dashboard_id: Optional[pulumi.Input[int]] = None,
folder: Optional[pulumi.Input[int]] = None,
slug: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Dashboard resources.
:param pulumi.Input[str] config_json: The complete dashboard model JSON.
:param pulumi.Input[int] dashboard_id: The numeric ID of the dashboard computed by Grafana.
:param pulumi.Input[int] folder: The id of the folder to save the dashboard in.
:param pulumi.Input[str] slug: URL friendly version of the dashboard title.
"""
if config_json is not None:
pulumi.set(__self__, "config_json", config_json)
if dashboard_id is not None:
pulumi.set(__self__, "dashboard_id", dashboard_id)
if folder is not None:
pulumi.set(__self__, "folder", folder)
if slug is not None:
pulumi.set(__self__, "slug", slug)
@property
@pulumi.getter(name="configJson")
def config_json(self) -> Optional[pulumi.Input[str]]:
"""
The complete dashboard model JSON.
"""
return pulumi.get(self, "config_json")
@config_json.setter
def config_json(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_json", value)
@property
@pulumi.getter(name="dashboardId")
def dashboard_id(self) -> Optional[pulumi.Input[int]]:
"""
The numeric ID of the dashboard computed by Grafana.
"""
return pulumi.get(self, "dashboard_id")
@dashboard_id.setter
def dashboard_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "dashboard_id", value)
@property
@pulumi.getter
def folder(self) -> Optional[pulumi.Input[int]]:
"""
The id of the folder to save the dashboard in.
"""
return pulumi.get(self, "folder")
@folder.setter
def folder(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "folder", value)
@property
@pulumi.getter
def slug(self) -> Optional[pulumi.Input[str]]:
"""
URL friendly version of the dashboard title.
"""
return pulumi.get(self, "slug")
@slug.setter
def slug(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "slug", value)
class Dashboard(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_json: Optional[pulumi.Input[str]] = None,
folder: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Create a Dashboard resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] config_json: The complete dashboard model JSON.
:param pulumi.Input[int] folder: The id of the folder to save the dashboard in.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DashboardArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a Dashboard resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param DashboardArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_json: Optional[pulumi.Input[str]] = None,
folder: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DashboardArgs.__new__(DashboardArgs)
if config_json is None and not opts.urn:
raise TypeError("Missing required property 'config_json'")
__props__.__dict__["config_json"] = config_json
__props__.__dict__["folder"] = folder
__props__.__dict__["dashboard_id"] = None
__props__.__dict__["slug"] = None
super(Dashboard, __self__).__init__(
'grafana:index/dashboard:Dashboard',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
config_json: Optional[pulumi.Input[str]] = None,
dashboard_id: Optional[pulumi.Input[int]] = None,
folder: Optional[pulumi.Input[int]] = None,
slug: Optional[pulumi.Input[str]] = None) -> 'Dashboard':
"""
Get an existing Dashboard resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] config_json: The complete dashboard model JSON.
:param pulumi.Input[int] dashboard_id: The numeric ID of the dashboard computed by Grafana.
:param pulumi.Input[int] folder: The id of the folder to save the dashboard in.
:param pulumi.Input[str] slug: URL friendly version of the dashboard title.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DashboardState.__new__(_DashboardState)
__props__.__dict__["config_json"] = config_json
__props__.__dict__["dashboard_id"] = dashboard_id
__props__.__dict__["folder"] = folder
__props__.__dict__["slug"] = slug
return Dashboard(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="configJson")
def config_json(self) -> pulumi.Output[str]:
"""
The complete dashboard model JSON.
"""
return pulumi.get(self, "config_json")
@property
@pulumi.getter(name="dashboardId")
def dashboard_id(self) -> pulumi.Output[int]:
"""
The numeric ID of the dashboard computed by Grafana.
"""
return pulumi.get(self, "dashboard_id")
@property
@pulumi.getter
def folder(self) -> pulumi.Output[Optional[int]]:
"""
The id of the folder to save the dashboard in.
"""
return pulumi.get(self, "folder")
@property
@pulumi.getter
def slug(self) -> pulumi.Output[str]:
"""
URL friendly version of the dashboard title.
"""
return pulumi.get(self, "slug")
```
#### File: python/pulumi_grafana/organization.py
```python
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['OrganizationArgs', 'Organization']
@pulumi.input_type
class OrganizationArgs:
def __init__(__self__, *,
admin_user: Optional[pulumi.Input[str]] = None,
admins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
create_users: Optional[pulumi.Input[bool]] = None,
editors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
viewers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Organization resource.
:param pulumi.Input[str] admin_user: The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
:param pulumi.Input[Sequence[pulumi.Input[str]]] admins: A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[bool] create_users: Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] editors: A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[str] name: The display name for the Grafana organization created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] viewers: A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
if admin_user is not None:
pulumi.set(__self__, "admin_user", admin_user)
if admins is not None:
pulumi.set(__self__, "admins", admins)
if create_users is not None:
pulumi.set(__self__, "create_users", create_users)
if editors is not None:
pulumi.set(__self__, "editors", editors)
if name is not None:
pulumi.set(__self__, "name", name)
if viewers is not None:
pulumi.set(__self__, "viewers", viewers)
@property
@pulumi.getter(name="adminUser")
def admin_user(self) -> Optional[pulumi.Input[str]]:
"""
The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
"""
return pulumi.get(self, "admin_user")
@admin_user.setter
def admin_user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "admin_user", value)
@property
@pulumi.getter
def admins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "admins")
@admins.setter
def admins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "admins", value)
@property
@pulumi.getter(name="createUsers")
def create_users(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
"""
return pulumi.get(self, "create_users")
@create_users.setter
def create_users(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "create_users", value)
@property
@pulumi.getter
def editors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "editors")
@editors.setter
def editors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "editors", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The display name for the Grafana organization created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def viewers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "viewers")
@viewers.setter
def viewers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "viewers", value)
@pulumi.input_type
class _OrganizationState:
def __init__(__self__, *,
admin_user: Optional[pulumi.Input[str]] = None,
admins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
create_users: Optional[pulumi.Input[bool]] = None,
editors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[int]] = None,
viewers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Organization resources.
:param pulumi.Input[str] admin_user: The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
:param pulumi.Input[Sequence[pulumi.Input[str]]] admins: A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[bool] create_users: Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] editors: A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[str] name: The display name for the Grafana organization created.
:param pulumi.Input[int] org_id: The organization id assigned to this organization by Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] viewers: A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
if admin_user is not None:
pulumi.set(__self__, "admin_user", admin_user)
if admins is not None:
pulumi.set(__self__, "admins", admins)
if create_users is not None:
pulumi.set(__self__, "create_users", create_users)
if editors is not None:
pulumi.set(__self__, "editors", editors)
if name is not None:
pulumi.set(__self__, "name", name)
if org_id is not None:
pulumi.set(__self__, "org_id", org_id)
if viewers is not None:
pulumi.set(__self__, "viewers", viewers)
@property
@pulumi.getter(name="adminUser")
def admin_user(self) -> Optional[pulumi.Input[str]]:
"""
The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
"""
return pulumi.get(self, "admin_user")
@admin_user.setter
def admin_user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "admin_user", value)
@property
@pulumi.getter
def admins(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "admins")
@admins.setter
def admins(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "admins", value)
@property
@pulumi.getter(name="createUsers")
def create_users(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
"""
return pulumi.get(self, "create_users")
@create_users.setter
def create_users(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "create_users", value)
@property
@pulumi.getter
def editors(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "editors")
@editors.setter
def editors(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "editors", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The display name for the Grafana organization created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="orgId")
def org_id(self) -> Optional[pulumi.Input[int]]:
"""
The organization id assigned to this organization by Grafana.
"""
return pulumi.get(self, "org_id")
@org_id.setter
def org_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "org_id", value)
@property
@pulumi.getter
def viewers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "viewers")
@viewers.setter
def viewers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "viewers", value)
class Organization(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
admin_user: Optional[pulumi.Input[str]] = None,
admins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
create_users: Optional[pulumi.Input[bool]] = None,
editors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
viewers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
        Create an Organization resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] admin_user: The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
:param pulumi.Input[Sequence[pulumi.Input[str]]] admins: A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[bool] create_users: Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] editors: A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[str] name: The display name for the Grafana organization created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] viewers: A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[OrganizationArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        Create an Organization resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param OrganizationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(OrganizationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
admin_user: Optional[pulumi.Input[str]] = None,
admins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
create_users: Optional[pulumi.Input[bool]] = None,
editors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
viewers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = OrganizationArgs.__new__(OrganizationArgs)
__props__.__dict__["admin_user"] = admin_user
__props__.__dict__["admins"] = admins
__props__.__dict__["create_users"] = create_users
__props__.__dict__["editors"] = editors
__props__.__dict__["name"] = name
__props__.__dict__["viewers"] = viewers
__props__.__dict__["org_id"] = None
super(Organization, __self__).__init__(
'grafana:index/organization:Organization',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
admin_user: Optional[pulumi.Input[str]] = None,
admins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
create_users: Optional[pulumi.Input[bool]] = None,
editors: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[int]] = None,
viewers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Organization':
"""
Get an existing Organization resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] admin_user: The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
:param pulumi.Input[Sequence[pulumi.Input[str]]] admins: A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[bool] create_users: Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] editors: A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
:param pulumi.Input[str] name: The display name for the Grafana organization created.
:param pulumi.Input[int] org_id: The organization id assigned to this organization by Grafana.
:param pulumi.Input[Sequence[pulumi.Input[str]]] viewers: A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _OrganizationState.__new__(_OrganizationState)
__props__.__dict__["admin_user"] = admin_user
__props__.__dict__["admins"] = admins
__props__.__dict__["create_users"] = create_users
__props__.__dict__["editors"] = editors
__props__.__dict__["name"] = name
__props__.__dict__["org_id"] = org_id
__props__.__dict__["viewers"] = viewers
return Organization(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="adminUser")
def admin_user(self) -> pulumi.Output[Optional[str]]:
"""
The login name of the configured default admin user for the Grafana installation. If unset, this value defaults to
admin, the Grafana default. Grafana adds the default admin user to all organizations automatically upon creation, and
this parameter keeps Terraform from removing it from organizations.
"""
return pulumi.get(self, "admin_user")
@property
@pulumi.getter
def admins(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of email addresses corresponding to users who should be given admin access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "admins")
@property
@pulumi.getter(name="createUsers")
def create_users(self) -> pulumi.Output[Optional[bool]]:
"""
Whether or not to create Grafana users specified in the organization's membership if they don't already exist in
Grafana. If unspecified, this parameter defaults to true, creating placeholder users with the name, login, and email set
to the email of the user, and a random password. Setting this option to false will cause an error to be thrown for any
users that do not already exist in Grafana.
"""
return pulumi.get(self, "create_users")
@property
@pulumi.getter
def editors(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of email addresses corresponding to users who should be given editor access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "editors")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The display name for the Grafana organization created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="orgId")
def org_id(self) -> pulumi.Output[int]:
"""
The organization id assigned to this organization by Grafana.
"""
return pulumi.get(self, "org_id")
@property
@pulumi.getter
def viewers(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of email addresses corresponding to users who should be given viewer access to the organization. Note: users
specified here must already exist in Grafana unless 'create_users' is set to true.
"""
return pulumi.get(self, "viewers")
``` |
{
"source": "jorgeperezg/wavespectra-1",
"score": 4
} |
#### File: wavespectra/core/timer.py
```python
import time
class Timer(object):
"""Wrap blocks of code to be timed.
with Timer(msg='Elapsed time to do stuff') as t:
do stuff
"""
def __init__(self, msg="elapsed time", verbose=True):
"""Timer object.
Args:
- msg (str): will be printed before the elapsed time value.
- verbose (bool): if True prints Elapsed time, if False only creates instance
with attributes.
"""
        self.msg = msg
        self.verbose = verbose
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, *args):
        self.secs = time.time() - self.start
        if self.verbose:
            print(f"{self.msg}: {self.secs:f} sec")
```
#### File: wavespectra/input/spotter.py
```python
import glob
import datetime
import json
import numpy as np
import xarray as xr
from dateutil.parser import parse
from wavespectra.specdataset import SpecDataset
from wavespectra.core.attributes import attrs, set_spec_attributes
def read_spotter(filename):
"""Read Spectra from spotter JSON file.
Args:
- filename (list, str): File name or file glob specifying spotter files to read.
Returns:
- dset (SpecDataset): spectra dataset object read from file.
"""
spot = Spotter(filename)
dset = spot.run()
return dset
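# Minimal usage sketch (the glob pattern is an assumption):
#
# dset = read_spotter("spotter_*.json")
# print(dset)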
class Spotter:
def __init__(self, filename_or_fileglob, toff=0):
"""Read wave spectra file from TRIAXYS buoy.
Args:
- filename_or_fileglob (str, list): filename or fileglob
specifying files to read.
- toff (float): time offset in hours to account for
time zone differences.
Returns:
- dset (SpecDataset) wavespectra SpecDataset instance.
Remark:
        - frequencies and directions from the first file are used as the
          reference for interpolating spectra from other files in case they
          differ. Interpolation is not yet implemented, so reading fails if
          spectral coordinates differ between files.
"""
self._filename_or_fileglob = filename_or_fileglob
self.toff = toff
def run(self):
"""Returns wave spectra dataset from one or more spotter files."""
dsets = []
for self.filename in self.filenames:
self._load_json()
self._set_arrays_from_json()
dsets.append(self._construct_dataset())
# Ensure same spectral coords across files, interp needs to be implemented
if not self._is_unique([dset.freq.values for dset in dsets]):
raise NotImplementedError(
"Varying frequency arrays between spotter files not yet supported."
)
if not self._is_unique([dset.dir.values for dset in dsets]):
raise NotImplementedError(
"Varying direction arrays between spotter files not yet supported."
)
# Concatenating datasets from multiple files
self.dset = xr.concat(dsets, dim="time")
return self.dset
def _is_unique(self, arrays):
"""Returns True if all iterators in arrays are the same."""
if len(set(tuple(array) for array in arrays)) == 1:
return True
else:
return False
def _set_arrays_from_json(self):
"""Set spectra attributes from arrays in json blob."""
# Spectra
keys = self.data["data"]["frequencyData"][0].keys()
for key in keys:
setattr(
self,
key,
[sample[key] for sample in self.data["data"]["frequencyData"]],
)
# Keep here only for checking - timestamps seem to differ
self.timestamp_spec = self.timestamp
self.latitude_spec = self.latitude
self.longitude_spec = self.longitude
# Parameters
if "waves" in self.data["data"]:
keys = self.data["data"]["waves"][0].keys()
for key in keys:
setattr(
self, key, [sample[key] for sample in self.data["data"]["waves"]]
)
# Keep here only for checking - timestamps seem to differ
self.timestamp_param = self.timestamp
self.latitude_param = self.latitude
self.longitude_param = self.longitude
def _construct_dataset(self):
"""Construct wavespectra dataset."""
self.dset = xr.DataArray(
data=self.efth, coords=self.coords, dims=self.dims, name=attrs.SPECNAME
).to_dataset()
self.dset[attrs.LATNAME] = xr.DataArray(
data=self.latitude, coords={"time": self.dset.time}, dims=("time")
)
self.dset[attrs.LONNAME] = xr.DataArray(
data=self.longitude, coords={"time": self.dset.time}, dims=("time")
)
set_spec_attributes(self.dset)
return self.dset
def _load_json(self):
"""Load data from json blob."""
with open(self.filename) as json_file:
self.data = json.load(json_file)
try:
self.data["data"]["spotterId"]
except KeyError:
raise OSError(f"Not a Spotter Spectra file: {self.filename}")
@property
def time(self):
"""The time coordinate values."""
return [
parse(time).replace(tzinfo=None) - datetime.timedelta(hours=self.toff)
for time in self.timestamp
]
@property
def efth(self):
"""The Variance density data values."""
return [np.expand_dims(varden, axis=1) for varden in self.varianceDensity]
@property
def freq(self):
"""The frequency coordinate values."""
if not self._is_unique(self.frequency):
raise NotImplementedError(
"Varying frequency arrays in single file not yet supported."
)
return self.frequency[0]
@property
def dir(self):
"""The direction coordinate values, currently set to [0.] for 1D spectra."""
return [0.0]
@property
def dims(self):
"""The dataset dimensions."""
return (attrs.TIMENAME, attrs.FREQNAME, attrs.DIRNAME)
@property
def coords(self):
"""The dataset coordinates."""
return {
attrs.TIMENAME: self.time,
attrs.FREQNAME: self.freq,
attrs.DIRNAME: self.dir,
}
@property
def filenames(self):
        if isinstance(self._filename_or_fileglob, list):
            filenames = sorted(self._filename_or_fileglob)
        elif isinstance(self._filename_or_fileglob, str):
            filenames = sorted(glob.glob(self._filename_or_fileglob))
        else:
            raise TypeError("filename_or_fileglob must be a str or a list of str")
if not filenames:
raise ValueError(f"No file located in {self._filename_or_fileglob}")
return filenames
```
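As a usage sketch for the reader above (the glob pattern is a hypothetical placeholder, not a shipped sample file):
```python
from wavespectra.input.spotter import read_spotter

# "spotter_*.json" is a placeholder glob; matched files are read and
# concatenated along the time dimension.
dset = read_spotter("spotter_*.json")
print(dset.efth.dims)  # expected ("time", "freq", "dir")
```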
#### File: wavespectra/output/json.py
```python
import json
def to_json(self, filename, mode="w", date_format="%Y-%m-%dT%H:%M:%SZ"):
"""Write spectra in json format.
    Xarray's `to_dict` is used to dump the dataset into a dictionary to save as a json file.
Args:
- filename (str): name of output json file.
- mode (str): file mode, by default `w` (create or overwrite).
- date_format(str): strftime format for serializing datetimes.
"""
dset_dict = self.to_dict()
for item in ["coords", "data_vars"]:
if "time" in dset_dict[item]:
times = list(getattr(self, item)["time"].dt.strftime(date_format).values)
dset_dict[item]["time"]["data"] = times
with open(filename, mode=mode) as fp:
json.dump(dset_dict, fp)
```
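A short sketch of invoking this writer, assuming it is attached to the `spec` dataset accessor as wavespectra does for its output modules; the input file name is hypothetical:
```python
import xarray as xr

from wavespectra.specdataset import SpecDataset  # registers the .spec accessor

# "spectra.nc" is a placeholder path to a wavespectra-compatible dataset.
dset = xr.open_dataset("spectra.nc")
dset.spec.to_json("spectra.json", date_format="%Y-%m-%dT%H:%M:%SZ")
```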
#### File: wavespectra/output/swan.py
```python
from wavespectra.core.attributes import attrs
from wavespectra.core.swan import SwanSpecFile
def to_swan(
self,
filename,
append=False,
id="Created by wavespectra",
ntime=None
):
"""Write spectra in SWAN ASCII format.
Args:
- filename (str): str, name for output SWAN ASCII file.
- append (bool): if True append to existing filename.
- id (str): used for header in output file.
- ntime (int, None): number of times to load into memory before dumping output
file if full dataset does not fit into memory, choose None to load all times.
Note:
- Only datasets with lat/lon coordinates are currently supported.
- Extra dimensions other than time, site, lon, lat, freq, dim not yet
supported.
- Only 2D spectra E(f,d) are currently supported.
        - ntime=None optimises speed by loading the entire dataset into memory;
          if the dataset does not fit into memory, prescribe a smaller number
          of times instead.
"""
# If grid reshape into site, otherwise ensure there is site dim to iterate over
dset = self._check_and_stack_dims()
ntime = min(ntime or dset.time.size, dset.time.size)
# Ensure time dimension exists
is_time = attrs.TIMENAME in dset[attrs.SPECNAME].dims
if not is_time:
dset = dset.expand_dims({attrs.TIMENAME: [None]})
times = dset[attrs.TIMENAME].values
else:
times = dset[attrs.TIMENAME].to_index().to_pydatetime()
times = [f"{t:%Y%m%d.%H%M%S}" for t in times]
# Keeping only supported dimensions
dims_to_keep = {attrs.TIMENAME, attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME}
dset = dset.drop_dims(set(dset.dims) - dims_to_keep)
# Ensure correct shape
dset = dset.transpose(attrs.TIMENAME, attrs.SITENAME, attrs.FREQNAME, attrs.DIRNAME)
# Instantiate swan object
try:
x = dset.lon.values
y = dset.lat.values
except AttributeError as err:
raise NotImplementedError(
"lon-lat variables are required to write SWAN spectra file"
) from err
sfile = SwanSpecFile(
filename,
freqs=dset.freq,
dirs=dset.dir,
time=is_time,
x=x,
y=y,
append=append,
id=id,
)
    # Dump times in chunks of at most ntime steps to bound memory use
i0 = 0
i1 = ntime
while i1 <= dset.time.size or i0 < dset.time.size:
ds = dset.isel(time=slice(i0, i1))
part_times = times[i0:i1]
i0 = i1
i1 += ntime
specarray = ds[attrs.SPECNAME].values
for itime, time in enumerate(part_times):
darrout = specarray[itime]
sfile.write_spectra(darrout, time=time)
sfile.close()
``` |
{
"source": "jorgeperezg/wavespectra",
"score": 2
} |
#### File: tests/core/test_plot.py
```python
import os
import pytest
from wavespectra import read_swan
FILES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../sample_files")
@pytest.fixture(scope='module')
def load_specdataset():
"""Load SpecDset but skip test if matplotlib is not installed."""
pytest.importorskip("matplotlib")
dset = read_swan(os.path.join(FILES_DIR, "swanfile.spec"), as_site=True)
return dset
# def teardown_module():
# plt = pytest.importorskip("matplotlib.pyplot")
# plt.show()
def test_single_pcolormesh(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.pcolormesh()
def test_single_contour(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.contour()
def test_single_contourf(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.contourf()
def test_single_as_period(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.contourf(as_period=True)
def test_single_no_log10(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.contourf(as_period=True, as_log10=False)
def test_single_sliced(load_specdataset):
plt = pytest.importorskip("matplotlib.pyplot")
fmin = 0.04
fmax = 0.2
darray = load_specdataset.isel(time=0, site=0).efth
darray_sliced1 = darray.sel(freq=slice(fmin, fmax))
darray_sliced2 = darray.spec.split(fmin=fmin, fmax=fmax)
darray_sliced1.spec.plot.contourf()
darray_sliced2.spec.plot.contourf()
darray.spec.plot.contourf()
ax = plt.gca()
ax.set_rmin(fmin)
ax.set_rmax(fmax)
def test_single_set_properties_xarr_mpl(load_specdataset):
dset = load_specdataset.isel(time=0, site=0)
dset.efth.spec.plot.contourf(
cmap="viridis",
vmin=-5,
vmax=-2,
levels=15,
add_colorbar=False,
)
def test_multi(load_specdataset):
dset = load_specdataset.isel(site=0)
dset.efth.spec.plot.contourf(
col="time",
col_wrap=3,
levels=15,
figsize=(15,8),
vmax=-1
)
def test_multi_clean_axis(load_specdataset):
dset = load_specdataset.isel(site=0)
dset.efth.spec.plot.contourf(
col="time",
col_wrap=3,
clean_radius=True,
clean_sector=True
)
```
#### File: wavespectra/construct/specconstruct.py
```python
import os
import numpy as np
import xarray as xr
from wavespectra.core.attributes import attrs, set_spec_attributes
from wavespectra.specdataset import SpecDataset
def prepare_reconstruction(spec_info, base_dset=None):
""" Load parameters for spectral reconstruction.
Arguments:
spec_info: dictionary for updating reconstruction defaults. Optionally extra variables to keep or rename.
- coordinates:
- spectral: freq, dir
- frequency spectrum:
- jonswap/TMA: hs, tp, gamma, dpt
- ochihubble: hs, tp, lam
- directional distribution:
- cos2s: dp, dspr
- normal: dp, dspr
base_dset: path or xarray dataset object
Returns:
ds: xarray dataset with parameters for spectral reconstruction. part dimension is used for concatenation
"""
reconstruction_defaults = {
"freq": np.arange(0.04, 1.0, 0.02), # frequencies
"dir": np.arange(0, 360, 10), # directions
"hs": "hs", # significant wave height
"tp": "tp", # peak period
"gamma": None, # jonswap peak enhancement factor
"dpt": None, # water depth
"lam": None, # ochihubble peak enhancement factor
"dp": "dp", # peak direction
"dspr": None, # directional spread
} # fields used for reconstruction. It can be updated with fields of base_dset, numbers, or datarrays
reconstruction_info = reconstruction_defaults.copy()
reconstruction_info.update(spec_info)
if base_dset is None:
ds0 = xr.Dataset()
elif isinstance(base_dset, str):
if os.path.isfile(base_dset):
ds0 = xr.open_dataset(base_dset)
else:
ds0 = xr.open_mfdataset(base_dset, combine="by_coords")
else:
ds0 = base_dset
ds = ds0[[]] # to keep metadata
spc_coords = ("freq", "dir")
for k, v in reconstruction_info.items():
if isinstance(v, xr.DataArray) and k in spc_coords:
ds[k] = v.values
elif isinstance(v, (list, tuple)) and k not in spc_coords:
ds[k] = xr.concat(
[ds0[e] if isinstance(e, str) else xr.DataArray(e) for e in v],
dim="part",
coords="minimal",
).assign_coords({"part": range(len(v))})
elif isinstance(v, str):
ds[k] = ds0[v]
elif v is None:
if k in ds0:
ds[k] = ds0[k]
else:
ds[k] = v
return ds
def finite_depth(freqs, dpt):
    """Factors for modifying JONSWAP spectra in shallow water (TMA spectrum).
Arguments:
freqs: frequencies
dpt: water depth
Returns:
phi: factors between 0 and 1 for each frequency
"""
w = 2 * np.pi * freqs
whg = w * (dpt / 9.81) ** 0.5
phi = w ** 0 # filled with ones
phi[whg < 2] = 1 - 0.5 * (2 - whg[whg < 2]) ** 2
phi[whg < 1] = 0.5 * whg[whg < 1] ** 2
return phi
def calc_Sf_jonswap(freqs, hs, fp, gamma, dpt=None, sigmaA=0.07, sigmaB=0.09, alpha=-1):
""" Reconstruct JONSWAP or TMA frequency spectra
Arguments:
freqs: frequencies
hs: significant wave height
fp: peak frequency
gamma: jonswap peak enhancement factor
dpt: water depth
sigmaA, sigmaB: spectral width parameters
alpha: normalization factor
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
sigma = xr.where(freqs <= fp, sigmaA, sigmaB)
r = np.exp(-((freqs - fp) ** 2.0) / (2 * sigma ** 2 * fp ** 2))
Sf = 0.0617 * freqs ** (-5) * np.exp(-1.25 * (freqs / fp) ** (-4)) * gamma ** r
if dpt is not None:
Sf *= finite_depth(freqs, dpt)
if alpha < 0: # normalizing by integration
alpha = (hs / Sf.spec.hs()) ** 2 # make sure m0=Hm0^2/16=int S(w)dw
elif alpha == 0: # original normalization for default values
alpha = 5.061 * hs ** 2 * fp ** 4 * (1 - 0.287 * np.log(gamma))
return (alpha * Sf).fillna(0) # alpha>0 is applied directly
gamma_fun = (
lambda x: np.sqrt(2.0 * np.pi / x)
* ((x / np.exp(1.0)) * np.sqrt(x * np.sinh(1.0 / x))) ** x
) # alternative to scipy.special.gamma
def calc_Sf_ochihubble(freqs, hs, fp, lam):
""" Reconstruct OCHI-HUBBLE frequency spectra
Arguments:
freqs: frequencies
hs: Significant wave height
fp: peak frequency
lam: ochihubble peak enhancement factor
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
w = 2 * np.pi * freqs
w0 = 2 * np.pi * fp
    # numpy ufuncs operate on DataArrays directly; xr.ufuncs is deprecated
    B = np.maximum(lam, 0.01) + 0.25
    A = 0.5 * np.pi * hs ** 2 * ((B * w0 ** 4) ** lam / gamma_fun(lam))
    a = np.minimum((w0 / w) ** 4, 100.0)
Sf = A * np.exp(-B * a) / (w ** (4.0 * B))
return Sf.fillna(0)
def calc_Dth_cos2s(dirs, dp, dspr):
    r"""Cosine 2s spreading function.
Arguments:
dirs: direction coordinates
dp: wave directions
dspr: wave directional spreads
Returns:
Dth: normalized spreading
Note:
        Function defined such that \int{Dth d\theta} = 1.
"""
th1 = 0.5 * np.deg2rad(dirs)
th2 = 0.5 * np.deg2rad(dp)
a = abs(
np.cos(th1) * np.cos(th2) + np.sin(th1) * np.sin(th2)
) # cos(a-b) = cos(a)cos(b)+sin(a)sin(b)
# Converting to cos2s spreading parameter
# see Holthuijsen pag165
s = (2./(dspr*np.pi/180)**2)-1
Dth = a ** (2.0 * s) # cos((dirs-dp)/2) ** (2*s)
Dth /= Dth.sum("dir") * abs(dirs[1] - dirs[0])
return Dth
def calc_Dth_normal(dirs, dp, dspr):
"""Normal distribution spreading
Arguments:
dirs: direction coordinates
dp: wave directions
dspr: wave directional spreads
Returns:
Dth: normalized spreading
"""
ddif0 = abs(dirs % 360 - dp % 360)
ddifmin = np.minimum(ddif0, 360 - ddif0)
Dth = np.exp((-(ddifmin ** 2)) / (2 * dspr ** 2)) / (dspr * (2 * np.pi) ** 0.5)
# TODO: wrapped normal but it's a bit pointless for real world dspr values
Dth /= Dth.sum("dir") * abs(dirs[1] - dirs[0])
return Dth
@xr.register_dataset_accessor("construct")
class SpecConstruct(object):
def __init__(self, xarray_obj):
self._obj = xarray_obj
def Sf(self, stype=""):
""" Wrapper for calc_Sf functions
Arguments:
stype: frequency spectrum type
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
if not stype or stype == "jonswap":
Sf = calc_Sf_jonswap(
self._obj.freq,
self._obj.hs,
1 / self._obj.tp,
self._obj.get("gamma", 3.3),
self._obj.get("dpt", None),
)
elif stype == "ochihubble":
Sf = calc_Sf_ochihubble(
self._obj.freq, self._obj.hs, 1 / self._obj.tp, self._obj.lam
)
else:
            raise ValueError(f"Unknown frequency spectrum type: {stype!r}")
return Sf
def Dth(self, dtype=""):
""" Wrapper for calc_Dth functions
Arguments:
            dtype: directional distribution type
Returns:
Dth: normalized directional spreading
"""
dspr = self._obj.get("dspr", 30)
if not dtype or dtype == "cos2s":
Dth = calc_Dth_cos2s(self._obj.dir, self._obj.dp, dspr)
elif dtype == "normal":
Dth = calc_Dth_normal(self._obj.dir, self._obj.dp, dspr)
else:
            raise ValueError(f"Unknown directional distribution type: {dtype!r}")
return Dth
def efth(self, stype="", dtype="", sumdim="part"):
""" Reconstruct directional spectra
Arguments:
stype: frequency spectrum type
            dtype: directional distribution type
sumdim: dimension to sum values
Returns:
efth: xarray dataarray with reconstructed frequency-direction spectra
"""
efth = self.Sf(stype) * self.Dth(dtype)
if sumdim in efth.coords:
efth = efth.sum(dim=sumdim)
return efth
    def to_dset(self, spec_info=None, **kwargs):
""" Create wavespectra dataset
Arguments:
spec_info: dictionary for updating reconstruction defaults.
Returns:
ds: wavespectra dataset with reconstructed frequency-direction spectra
"""
# TODO: Ensure that all arrays have wavespectra compatible names
if spec_info:
ds = prepare_reconstruction(spec_info, base_dset=self._obj)
else:
ds = self._obj.copy()
ds[attrs.SPECNAME] = ds.construct.efth(**kwargs)
set_spec_attributes(ds)
return ds
if __name__ == "__main__":
# Example1
spec_info = {
"hs": [1, 3],
"tp": [5, 12],
"gamma": 3.3,
"dp": [10, 40],
"dspr": [35, 25],
}
ds = prepare_reconstruction(spec_info).construct.to_dset()
# # Example2
# spec_info = {
# "hs": ["phs0", "phs1", "phs2"],
# "tp": ["ptp0", "ptp1", "ptp2"],
# "gamma": [1.0, 3.3, 3.3],
# "dp": ["pdir0", "pdir1", "pdir2"],
# "dspr": 30,
# }
# ds = xr.open_dataset(grdfile).construct.to_dset(spec_info)
# # Example3
# dstmp = xr.open_dataset(grdfile).isel(time=1, longitude=range(79, 82), latitude=62)
# spec_info = {
# 'hs': ["sea8hs", "sw8hs"],
# 'tp': ["sea8tp", "sw8tp"],
# "lam": [1.54 * np.exp(-0.062 * dstmp.hs), 3.00],
# "dp": ["sea8dp", "sw8dp"],
# "dspr": [35, 25],
# }
# ds = dstmp.construct.to_dset(spec_info, stype="ochihubble", dtype="normal")
```
#### File: wavespectra/core/specpartpy.py
```python
import numpy as np
def ptnghb(nk, nth):
"""Short description.
Long description if required.
Args:
- `nk`: number of frequencies/wavenumbers in spectrum.
- `nth`: number of directions in spectrum.
Returns:
- `neigh`: add description.
"""
# build list of neighbours for each point
nspec = nk * nth
neigh = [[] for _ in range(nspec)]
for n in range(nspec):
ith = n % nth
ik = n // nth
if ik > 0: # ... point at the bottom
neigh[n].append(n - nth)
if ik < nk - 1: # ... point at the top
neigh[n].append(n + nth)
if ith > 0: # ... point at the left
neigh[n].append(n - 1)
else: # ... with wrap.
neigh[n].append(n - 1 + nth)
if ith < nth - 1: # ... point at the right
neigh[n].append(n + 1)
else: # ... with wrap.
neigh[n].append(n + 1 - nth)
if ik > 0 and ith > 0: # ... point at the bottom-left
neigh[n].append(n - nth - 1)
elif ik > 0 and ith == 0: # ... with wrap.
neigh[n].append(n - nth - 1 + nth)
if ik < nk - 1 and ith > 0: # ... point at the top-left
neigh[n].append(n + nth - 1)
elif ik < nk - 1 and ith == 0: # ... with wrap
neigh[n].append(n + nth - 1 + nth)
if ik > 0 and ith < nth - 1: # ... point at the bottom-right
neigh[n].append(n - nth + 1)
elif ik > 0 and ith == nth - 1: # ... with wrap
neigh[n].append(n - nth + 1 - nth)
if ik < nk - 1 and ith < nth - 1: # ... point at the top-right
neigh[n].append(n + nth + 1)
elif ik < nk - 1 and ith == nth - 1: # ... with wrap
neigh[n].append(n + nth + 1 - nth)
return neigh
def partition(spec, ihmax=200):
"""Return the array with numbered partitions.
Args:
- `spec`: 2D spectrum array Ed(y=freq, x=dir).
- `ihmax`: add description.
Returns:
- `part_array`: array with same shape of `spec` with
the numbered partitions.
"""
nk, nth = spec.shape # ensure this is the correct order
neigh = ptnghb(nk, nth)
nspec = spec.size
zmin = spec.min()
zmax = spec.max()
zp = -spec.flatten() + zmax
fact = (ihmax - 1) / (zmax - zmin)
imi = np.around(zp * fact).astype(int)
ind = zp.argsort()
# 0. initializations
imo = -np.ones(nspec, dtype=int) # mask = -2, init = -1, iwshed = 0
ic_label = 0
imd = np.zeros(nspec, dtype=int)
ifict_pixel = -100
iq1 = []
mstart = 0
# 1. loop over levels
for ih in range(ihmax):
# 1.a pixels at level ih
for m in range(mstart, nspec):
ip = ind[m]
if imi[ip] != ih:
break
# flag the point, if it stays flagged, it is a separate minimum.
imo[ip] = -2
# if there is neighbor, set distance and add to queue.
if any(imo[neigh[ip]] >= 0):
imd[ip] = 1
iq1.append(ip)
# 1.b process the queue
ic_dist = 1
iq1.append(ifict_pixel)
while True:
ip = iq1.pop(0)
# check for end of processing
if ip == ifict_pixel:
if not iq1:
break
iq1.append(ifict_pixel)
ic_dist += 1
ip = iq1.pop(0)
# process queue
for ipp in neigh[ip]:
# check for labeled watersheds or basins
if imo[ipp] >= 0 and imd[ipp] < ic_dist:
if imo[ipp] > 0:
if imo[ip] in [-2, 0]:
imo[ip] = imo[ipp]
elif imo[ip] != imo[ipp]:
imo[ip] = 0
elif imo[ip] == -2:
imo[ip] = 0
elif imo[ipp] == -2 and imd[ipp] == 0:
imd[ipp] = ic_dist + 1
iq1.append(ipp)
# 1.c check for mask values in imo to identify new basins
for ip in ind[mstart:m]:
imd[ip] = 0
if imo[ip] == -2:
ic_label += 1 # ... new basin
iq2 = [ip]
while iq2:
imo[iq2] = ic_label
iqset = set([n for i in iq2 for n in neigh[i]])
iq2 = [
i for i in iqset if imo[i] == -2
] # ... all masked points connected to it
mstart = m
# 2. find nearest neighbor of 0 watershed points and replace
# use original input to check which group to affiliate with 0
# storing changes first in imd to assure symetry in adjustment.
for _ in range(5):
watershed0 = np.where(imo == 0)[0]
if not any(watershed0):
break
newvals = []
for jl in watershed0:
jnl = [j for j in neigh[jl] if imo[j] != 0]
if any(jnl):
ipt = abs(zp[jnl] - zp[jl]).argmin()
newvals.append(imo[jnl[ipt]])
else:
newvals.append(0)
imo[watershed0] = newvals
part_array = imo.reshape(spec.shape)
return part_array
```
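A toy check of `partition` on a synthetic bimodal spectrum; the values are illustrative only, but two well-separated peaks should yield distinct partition labels:
```python
import numpy as np

from wavespectra.core.specpartpy import partition

nk, nth = 20, 24
k, th = np.meshgrid(np.arange(nk), np.arange(nth), indexing="ij")
# Two separated gaussian peaks standing in for two wave systems
spec = np.exp(-((k - 5) ** 2 + (th - 6) ** 2) / 8.0)
spec += np.exp(-((k - 14) ** 2 + (th - 18) ** 2) / 8.0)

labels = partition(spec, ihmax=200)
print(labels.shape, np.unique(labels))  # same shape as spec, labelled basins
```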
#### File: wavespectra/input/ww3_msl.py
```python
import numpy as np
import xarray as xr
from wavespectra.core.attributes import attrs, set_spec_attributes
from wavespectra.specdataset import SpecDataset
def read_ww3_msl(filename_or_fileglob, chunks={}):
"""Read Spectra from WAVEWATCHIII MetOcean Solutions netCDF format.
Args:
- filename_or_fileglob (str): filename or fileglob specifying multiple
files to read.
- chunks (dict): chunk sizes for dimensions in dataset. By default
dataset is loaded using single chunk for all dimensions (see
xr.open_mfdataset documentation).
Returns:
- dset (SpecDataset): spectra dataset object read from ww3 file.
Note:
        - If file is too large to fit in memory, consider specifying chunks for
'time' and/or 'site' dims
"""
dset = xr.open_mfdataset(filename_or_fileglob, chunks=chunks)
_units = dset.specden.attrs.get("units", "")
dset = dset.rename(
{"freq": attrs.FREQNAME, "dir": attrs.DIRNAME, "wsp": attrs.WSPDNAME}
)
dset[attrs.SPECNAME] = (dset["specden"].astype("float32") + 127.0) * dset["factor"]
dset = dset.drop(["specden", "factor", "df"])
# Assign site coordinate so they will look like those read from native ww3 files
dset[attrs.SITENAME] = np.arange(1.0, dset.site.size + 1)
set_spec_attributes(dset)
dset[attrs.SPECNAME].attrs.update({"_units": _units, "_variable_name": "specden"})
if attrs.DIRNAME not in dset or len(dset.dir) == 1:
dset[attrs.SPECNAME].attrs.update({"units": "m^{2}.s"})
return dset
```
#### File: wavespectra/input/wwm.py
```python
import dask.array as da
import numpy as np
import xarray as xr
from wavespectra.core.attributes import attrs, set_spec_attributes
from wavespectra.core.misc import uv_to_spddir
from wavespectra.specdataset import SpecDataset
R2D = 180 / np.pi
def read_wwm(filename_or_fileglob, chunks={}, convert_wind_vectors=True):
    """Read Spectra from WWM native netCDF format.
Args:
- filename_or_fileglob (str): filename or fileglob specifying multiple
files to read.
- chunks (dict): chunk sizes for dimensions in dataset. By default
dataset is loaded using single chunk for all dimensions (see
xr.open_mfdataset documentation).
        - convert_wind_vectors (bool): set True to convert wind vector
          components into speed / direction data arrays.
Returns:
        - dset (SpecDataset): spectra dataset object read from wwm file.
Note:
        - If file is too large to fit in memory, consider specifying chunks for
'time' and/or 'station' dims.
"""
dset = xr.open_mfdataset(filename_or_fileglob, chunks=chunks)
_units = dset.AC.attrs.get("units", "")
dset = dset.rename(
{
"nfreq": attrs.FREQNAME,
"ndir": attrs.DIRNAME,
"nbstation": attrs.SITENAME,
"AC": attrs.SPECNAME,
"lon": attrs.LONNAME,
"lat": attrs.LATNAME,
"DEP": attrs.DEPNAME,
"ocean_time": attrs.TIMENAME,
}
)
# Calculating wind speeds and directions
if convert_wind_vectors and "Uwind" in dset and "Vwind" in dset:
dset[attrs.WSPDNAME], dset[attrs.WDIRNAME] = uv_to_spddir(
dset["Uwind"], dset["Vwind"], coming_from=True
)
# Setting standard names and storing original file attributes
set_spec_attributes(dset)
dset[attrs.SPECNAME].attrs.update(
{"_units": _units, "_variable_name": attrs.SPECNAME}
)
# Assigning spectral coordinates
dset[attrs.FREQNAME] = dset.spsig / (2 * np.pi) # convert rad to Hz
dset[attrs.DIRNAME] = dset.spdir
# converting Action to Energy density and adjust density to Hz
dset[attrs.SPECNAME] = dset[attrs.SPECNAME] * dset.spsig * (2 * np.pi)
# Converting from radians
dset[attrs.DIRNAME] *= R2D
dset[attrs.SPECNAME] /= R2D
# Returns only selected variables, transposed
to_drop = [
dvar
for dvar in dset.data_vars
if dvar
not in [
attrs.SPECNAME,
attrs.WSPDNAME,
attrs.WDIRNAME,
attrs.DEPNAME,
attrs.LONNAME,
attrs.LATNAME,
]
]
dims = [d for d in ["time", "site", "freq", "dir"] if d in dset.efth.dims]
return dset.drop(to_drop).transpose(*dims)
if __name__ == "__main__":
import os
import matplotlib.pyplot as plt
import xarray as xr
FILES_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "../../tests/sample_files"
)
dset = read_wwm(os.path.join(FILES_DIR, "wwmfile.nc"))
ds = xr.open_dataset("/source/wavespectra/tests/sample_files/wwmfile.nc")
hs_wwm = ds.HS
tp_wwm = ds.TPP
    hs_wavespectra = dset.spec.hs()
    tp_wavespectra = dset.spec.tp()
plt.figure()
hs_wavespectra.isel(site=0).plot(label="wavespectra")
hs_wwm.isel(nbstation=0).plot(label="wwm")
plt.title("Hs")
plt.legend()
plt.figure()
tp_wavespectra.isel(site=0).plot(label="wavespectra")
tp_wwm.isel(nbstation=0).plot(label="wwm")
plt.title("Tp")
plt.legend()
s = dset.isel(site=0, time=5).rename({"freq": "period"})
s.period.values = 1.0 / s.period.values
plt.figure()
s.efth.plot()
print("Tp from file: {}".format(ds.isel(nbstation=0, ocean_time=5).TPP.values))
plt.show()
``` |
{
"source": "jorgepiloto/cryptokit",
"score": 4
} |
#### File: src/cipherkit/alphabets.py
```python
def spanish():
""" Returns the 27 character spanish alphabet.
Parameters
----------
None
Returns
-------
ABCDEFGHIJKLMNÑOPQRSTUVWXYZ
"""
return "ABCDEFGHIJKLMNÑOPQRSTUVWXYZ"
def english():
""" Returns the 26 character english alphabet.
Parameters
----------
None
Returns
-------
ABCDEFGHIJKLMNOPQRSTUVWXYZ
"""
return "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def decimal():
""" Returns the 10 character decimal alphabet.
Parameters
----------
None
Returns
-------
0123456789
"""
return "0123456789"
def ascii_basic():
    r""" Returns the 95 printable ASCII characters.
Parameters
----------
None
Returns
-------
' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~'
"""
return " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
```
#### File: cipherkit/core/caesar.py
```python
def caesar(plain_text, key, alphabet, mode="cipher"):
    """ Cipher or decipher text using the Caesar algorithm.
Parameters
----------
plain_text: str
Text to be ciphered.
key: int
Key for cipher.
    alphabet: str
        Alphabet that contains the characters of the language.
    mode: str, optional
        "cipher" (default) to encrypt; any other value deciphers.
Returns
-------
    hidden_text: str
Ciphered text.
"""
if not isinstance(key, int):
raise ValueError("Key should be an integer value.")
hidden_text = ""
for character in plain_text.upper():
if character in alphabet:
# This means we can cipher that letter
pos = alphabet.index(character)
if mode == "cipher":
pos = (pos + key) % len(alphabet)
else:
pos = (pos - key) % len(alphabet)
hidden_text += alphabet[pos]
else:
hidden_text += character
return hidden_text
```
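A quick round trip with the english alphabet; ciphering and then deciphering with the same key recovers the upper-cased input:
```python
from cipherkit.alphabets import english
from cipherkit.core.caesar import caesar

secret = caesar("attack at dawn", key=3, alphabet=english())
print(secret)  # DWWDFN DW GDZQ
plain = caesar(secret, key=3, alphabet=english(), mode="decipher")
print(plain)  # ATTACK AT DAWN
```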
#### File: cryptokit/tests/test_alphabets.py
```python
from cipherkit.alphabets import ascii_basic
from cipherkit.alphabets import decimal
from cipherkit.alphabets import english
from cipherkit.alphabets import spanish
def test_alphabet_spanish():
expected_alphabet = "ABCDEFGHIJKLMNÑOPQRSTUVWXYZ"
assert spanish() == expected_alphabet
def test_alphabet_english():
expected_alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
assert english() == expected_alphabet
def test_alphabet_decimal():
expected_alphabet = "0123456789"
assert decimal() == expected_alphabet
def test_alphabet_ascii_basic():
expected_alphabet = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
assert ascii_basic() == expected_alphabet
``` |
{
"source": "jorgepiloto/lamberthub",
"score": 3
} |
#### File: lamberthub/ecc_solvers/avanzini.py
```python
import time
import numpy as np
from scipy.optimize import newton
from lamberthub.ecc_solvers.utils import (
_f,
coe_at_eccT,
get_fundamental_ellipse_properties,
get_geometry,
)
from lamberthub.utils.assertions import assert_parameters_are_valid
from lamberthub.utils.elements import coe2rv
def avanzini2008(
mu,
r1,
r2,
tof,
M=0,
prograde=True,
low_path=True,
maxiter=35,
atol=1e-5,
rtol=1e-7,
full_output=False,
):
r"""
    Solves Lambert's problem using the algorithm devised by Avanzini.
Parameters
----------
mu: float
Gravitational parameter, equivalent to :math:`GM` of attractor body.
r1: numpy.array
Initial position vector.
r2: numpy.array
Final position vector.
M: int
Number of revolutions. Must be equal or greater than 0 value.
prograde: bool
If `True`, specifies prograde motion. Otherwise, retrograde motion is imposed.
low_path: bool
If two solutions are available, it selects between high or low path.
maxiter: int
Maximum number of iterations.
atol: float
Absolute tolerance.
rtol: float
Relative tolerance.
full_output: bool
If True, the number of iterations and time per iteration are also returned.
Returns
-------
v1: numpy.array
Initial velocity vector.
v2: numpy.array
Final velocity vector.
numiter: int
Number of iterations.
tpi: float
Time per iteration in seconds.
Notes
-----
The following routine might be simplified making use of private functions.
However, we decided to expose all the auxiliary routines to properly
reproduce original report figures.
"""
# Check proper value of multi-revolution. Although we could not accept M at
# all, this ensures all routines within the library work under the same
# number and type of parameters.
if M > 0:
raise ValueError(
"Avanzini is not able to work within the multi-revolution scenario!"
)
# Check that input parameters are safe
assert_parameters_are_valid(mu, r1, r2, tof, M)
# Start by computing an auxiliary set of geometric parameters
geometry = get_geometry(r1, r2, prograde)
# Filter out the evolution of ecc_T w.r.t. the independent variable based on
# the transfer angle criteria, see original report flowchart.
eccT_at_x = _get_eccT_at_x(geometry)
    # Iterative process based on the secant variant of the Newton-Raphson
    # method begins. Avanzini did not provide a derivative for Kepler's
    # equation of time w.r.t. the ecc_T variable, so this root solver is
    # required rather than the pure N-R one.
tic = time.perf_counter()
x_sol, r = newton(
_f,
0,
args=(eccT_at_x, mu, geometry, tof),
maxiter=maxiter,
tol=atol,
rtol=rtol,
full_output=True,
)
tac = time.perf_counter()
# Extract the number of iterations
numiter = r.iterations
# Compute the time per iteration
tpi = (tac - tic) / numiter
# Solve the actual value of ecc_T at solved x and retrieve COE elements
ecc_T = eccT_at_x(x_sol)
p, ecc, inc, raan, argp, nu_1, nu_2 = coe_at_eccT(ecc_T, r1, r2, prograde)
# Compute the velocity vectors from the classic orbital elements
(_, v1), (_, v2) = [coe2rv(mu, p, ecc, inc, raan, argp, nu) for nu in [nu_1, nu_2]]
return (v1, v2, numiter, tpi) if full_output is True else (v1, v2)
def _get_eccT_limits(geometry):
"""
Computes transverse eccentricity value limits as of problem geometry.
"""
# Solve for the fundamental ellipse properties
r1_norm, r2_norm, c_norm, dtheta, _ = geometry
ecc_F, a_F, p_F = get_fundamental_ellipse_properties(r1_norm, r2_norm, c_norm)
# Compute the limits
ecc_max = -1 / np.cos(dtheta / 2)
ecc_H = np.sqrt(ecc_max ** 2 - ecc_F ** 2) if dtheta > np.pi else np.inf
ecc_P = np.sqrt(1 - ecc_F ** 2)
return ecc_H, ecc_P
def _get_eccT_at_x(geometry):
"""
Returns proper transverse eccentricity function depending on the value
of the transfer angle.
    Parameters
    ----------
    geometry: tuple
        Transfer geometry tuple as returned by `get_geometry`, holding the
        position vector norms, the chord length and the transfer angle; the
        ecc_T limits are derived from it.
Returns
-------
eccT_at_x: function
A function to be evaluated at independent variable values.
Notes
-----
These are equations (16) and (18) from the official report [1].
"""
# Compute the limits of the ecc_T value
ecc_H, ecc_P = _get_eccT_limits(geometry)
_, _, _, dtheta, _ = geometry
if dtheta > np.pi:
# Equation (16) is applied
def eccT_at_x(x):
X = np.exp((1 / ecc_H + 1 / ecc_P) * x)
return ecc_P * ecc_H * (X - 1) / (ecc_P + ecc_H * X)
else:
# Equation (18) is applied
def eccT_at_x(x):
return ecc_P * (1 - np.exp(-x / ecc_P))
return eccT_at_x
```
#### File: lamberthub/universal_solvers/vallado.py
```python
import time
import numpy as np
from lamberthub.utils.angles import get_transfer_angle
from lamberthub.utils.assertions import assert_parameters_are_valid
from lamberthub.utils.stumpff import c2, c3
def vallado2013(
mu,
r1,
r2,
tof,
M=0,
prograde=True,
low_path=True,
maxiter=100,
atol=1e-5,
rtol=1e-7,
full_output=False,
):
r"""
Vallado's algorithm makes use of the universal formulation to solve for the
Lambert's problem. By making use of a bisection method, it guarantees the
convergence to the solution but the amount of iterations require
dramatically increases.
Parameters
----------
mu: float
Gravitational parameter, equivalent to :math:`GM` of attractor body.
r1: numpy.array
Initial position vector.
r2: numpy.array
Final position vector.
M: int
Number of revolutions. Must be equal or greater than 0 value.
prograde: bool
If `True`, specifies prograde motion. Otherwise, retrograde motion is imposed.
low_path: bool
If two solutions are available, it selects between high or low path.
maxiter: int
Maximum number of iterations.
atol: float
Absolute tolerance.
rtol: float
Relative tolerance.
full_output: bool
If True, the number of iterations and time per iteration are also returned.
Returns
-------
v1: numpy.array
Initial velocity vector.
v2: numpy.array
Final velocity vector.
numiter: int
Number of iterations.
tpi: float
Time per iteration in seconds.
Notes
-----
This algorithm is presented as an alternative to the one developed by Bate
in 1971. Bate did not impose a particular numerical solver for his algorithm
but cited both bisection and Newton's one. However, for some values of the
boundary problem, the initial guess might diverge if Newton's solver is
used. That's why Vallado decided to employ a bisection method instead.
Although detrimental from the point of view of performance, this algorithm
properly reaches solution in the majority of the cases.
All credits of the implementation go to <NAME> and the
poliastro development team, from which this routine inherits. Some changes
were made to adapt it to `lamberthub` API.
Copyright (c) 2012-2021 <NAME> and the poliastro
development team.
References
----------
[1] <NAME>. (2001). Fundamentals of astrodynamics and applications
(Vol. 12). Springer Science & Business Media.
"""
# Check that input parameters are safe
assert_parameters_are_valid(mu, r1, r2, tof, M)
# Retrieve the fundamental geometry of the problem
r1_norm, r2_norm, c_norm = [np.linalg.norm(vec) for vec in [r1, r2, r2 - r1]]
dtheta = get_transfer_angle(r1, r2, prograde)
# Compute Vallado's transfer angle parameter
A = _get_A(r1_norm, r2_norm, dtheta)
if A == 0.0:
raise RuntimeError("Cannot compute orbit, phase angle is 180 degrees")
# The initial guess and limits for the bisection method
psi, psi_low, psi_up = 0.0, -4 * np.pi ** 2, 4 * np.pi ** 2
tic = time.perf_counter()
for numiter in range(1, maxiter + 1):
# Evaluate the value of y at a given psi
y = _y_at_psi(psi, r1_norm, r2_norm, A)
if A > 0.0:
            # Readjust psi_low until y > 0.0
while y < 0.0:
psi_low = psi
psi = (
0.8
* (1.0 / c3(psi))
* (1.0 - (r1_norm * r2_norm) * np.sqrt(c2(psi)) / A)
)
y = _y_at_psi(psi, r1_norm, r2_norm, A)
X = _X_at_psi(psi, y)
tof_new = _tof_vallado(mu, psi, X, A, y)
# Convergence check
if np.abs((tof_new - tof) / tof) < rtol:
tac = time.perf_counter()
tpi = (tac - tic) / numiter
break
# Bisection check
condition = tof_new <= tof
psi_low = psi_low + (psi - psi_low) * condition
psi_up = psi_up + (psi - psi_up) * (not condition)
psi = (psi_up + psi_low) / 2
else:
raise ValueError("Exceeded maximum number of iterations!")
f = 1 - y / r1_norm
g = A * np.sqrt(y / mu)
gdot = 1 - y / r2_norm
v1 = (r2 - f * r1) / g
v2 = (gdot * r2 - r1) / g
return (v1, v2, numiter, tpi) if full_output is True else (v1, v2)
def _tof_vallado(mu, psi, X, A, y):
"""Evaluates universal Kepler's equation.
Parameters
----------
mu: float
The gravitational parameter.
psi: float
The free-parameter or independent variable.
X: float
Auxiliary variable.
A: float
The transfer angle parameter.
y: float
Auxiliary variable.
Returns
-------
tof: float
The computed time of flight.
"""
tof = (X ** 3 * c3(psi) + A * np.sqrt(y)) / np.sqrt(mu)
return tof
def _X_at_psi(psi, y):
"""Computes the value of X at given psi.
Parameters
----------
psi: float
The free-parameter or independent variable.
y: float
Auxiliary variable.
Returns
-------
X: float
Auxiliary variable.
"""
X = np.sqrt(y / c2(psi))
return X
def _get_A(r1_norm, r2_norm, dtheta):
"""Computes the value of the A constant.
Parameters
----------
r1_norm: float
Initial position vector norm.
r2_norm: float
Final position vector norm.
dtheta: float
The transfer angle in radians.
Returns
-------
A: float
The transfer angle parameter.
"""
t_m = 1 if dtheta < np.pi else -1
A = t_m * (r1_norm * r2_norm * (1 + np.cos(dtheta))) ** 0.5
return A
def _y_at_psi(psi, r1_norm, r2_norm, A):
"""Evaluates the value of y at given psi.
Parameters
----------
psi: float
The free-parameter or independent variable.
r1_norm: float
Initial position vector norm.
r2_norm: float
Final position vector norm.
A: float
The transfer angle parameter.
Returns
-------
y: float
Auxiliary variable.
Notes
-----
This is equation (7-59) simplified, similarly as made in [1].
"""
y = (r1_norm + r2_norm) + A * (psi * c3(psi) - 1) / c2(psi) ** 0.5
return y
```
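A usage sketch for `vallado2013`, reusing the Der (2011) geometry from the test suite and assuming the solver is exported at package level, as the tests do for `gooding1990`:
```python
import numpy as np

from lamberthub import vallado2013

mu_earth = 3.986004418e5  # [km ** 3 / s ** 2]
r1 = np.array([22592.145603, -1599.915239, -19783.950506])  # [km]
r2 = np.array([1922.067697, 4054.157051, -8925.727465])  # [km]
tof = 36000  # [s]

v1, v2, numiter, tpi = vallado2013(mu_earth, r1, r2, tof, full_output=True)
print(numiter, v1, v2)
```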
#### File: lamberthub/utils/angles.py
```python
import numpy as np
from numpy import cross, dot
from numpy.linalg import norm
def get_transfer_angle(r1, r2, prograde):
"""
Solves for the transfer angle being known the sense of rotation.
Parameters
----------
r1: np.array
Initial position vector.
r2: np.array
Final position vector.
prograde: bool
If True, it assumes prograde motion, otherwise assumes retrograde.
Returns
-------
dtheta: float
Transfer angle in radians.
"""
# Check if both position vectors are collinear. If so, check if the transfer
# angle is 0 or pi.
if np.all(np.cross(r1, r2) == 0):
return 0 if np.all(np.sign(r1) == np.sign(r2)) else np.pi
# Solve for a unitary vector normal to the vector plane. Its direction and
# sense the one given by the cross product (right-hand) from r1 to r2.
h = cross(r1, r2) / norm(np.cross(r1, r2))
# Compute the projection of the normal vector onto the reference plane.
alpha = dot(np.array([0, 0, 1]), h)
# Get the minimum angle (0 <= dtheta <= pi) between r1 and r2.
r1_norm, r2_norm = [norm(vec) for vec in [r1, r2]]
theta0 = np.arccos(dot(r1, r2) / (r1_norm * r2_norm))
# Fix the value of theta if necessary
if prograde is True:
dtheta = theta0 if alpha > 0 else 2 * np.pi - theta0
else:
dtheta = theta0 if alpha < 0 else 2 * np.pi - theta0
return dtheta
def get_orbit_normal_vector(r1, r2, prograde):
"""
Computes a unitary normal vector aligned with the specific angular momentum
one of the orbit.
Parameters
----------
r1: np.array
Initial position vector.
r2: np.array
Final position vector.
prograde: bool
If True, it assumes prograde motion, otherwise assumes retrograde.
Returns
-------
i_h: np.array
Unitary vector aligned with orbit specific angular momentum.
"""
# Compute the normal vector and its projection onto the vertical axis
i_h = np.cross(r1, r2) / norm(np.cross(r1, r2))
# Solve the projection onto the positive vertical direction of the
# fundamental plane.
alpha = dot(np.array([0, 0, 1]), i_h)
    # A prograde orbit always has a positive vertical component of its specific
# angular momentum. Therefore, we just need to check for this condition
if prograde is True:
i_h = i_h if alpha > 0 else -i_h
else:
i_h = i_h if alpha < 0 else -i_h
return i_h
def get_orbit_inc_and_raan_from_position_vectors(r1, r2, prograde):
"""
Computes the inclination of the orbit being known an initial and a final
position vectors together with the sense of motion.
Parameters
----------
r1: np.array
Initial position vector.
r2: np.array
Final position vector.
prograde: bool
If True, it assumes prograde motion, otherwise assumes retrograde.
Returns
-------
inc: float
Inclination of the orbit.
raan: float
Right ascension of the ascending node.
"""
# Get a unitary vector aligned in direction and sense with the specific
# angular momentum one.
i_h = get_orbit_normal_vector(r1, r2, prograde)
# Define the unitary vector along Z-axis of the fundamental plane
i_K = np.array([0, 0, 1])
# If the orbit is coplanar with fundamental plane, neither inc or raan are
# defined. TODO: use atol and rtol instead of pure floating zero comparison
if i_h[0] == 0 and i_h[1] == 0:
inc, raan = 0, 0
else:
# Inclination is always bounded between [0, pi], so no correction is
# needed
inc = np.arccos(i_h[2] / norm(i_h))
# Compute the RAAN using a vector in the direction and sense of the line
# of nodes. Because RAAN is bounded between [0, 2pi], the arctan2
# function is used.
n = cross(i_K, i_h)
raan = np.arctan2(n[1], n[0]) % (2 * np.pi)
return inc, raan
def nu_to_E(nu, ecc):
"""
Retrieves eccentric anomaly from true one.
Parameters
----------
nu: float
True anomaly.
ecc: float
Eccentricity of the orbit.
Returns
-------
E: float
Eccentric anomaly.
"""
E = 2 * np.arctan(np.sqrt((1 - ecc) / (1 + ecc)) * np.tan(nu / 2))
return E
def E_to_nu(E, ecc):
"""
Retrieves true anomaly from eccentric one.
Parameters
----------
E: float
Eccentric anomaly.
ecc: float
Eccentricity of the orbit.
Returns
-------
nu: float
True anomaly.
"""
nu = 2 * np.arctan(np.sqrt((1 + ecc) / (1 - ecc)) * np.tan(E / 2))
return nu
def nu_to_B(nu):
"""
Retrieves parabolic anomaly from true one.
Parameters
----------
nu: float
True anomaly
Returns
-------
B: float
Parabolic anomaly
Notes
-----
As explained in Vallado's [1], :math:`B` is used instead of :math:`P` just
to not confuse with the orbital parameter.
"""
B = np.tan(nu / 2)
return B
def B_to_nu(B):
"""
Retrieves the true anomaly from parabolic one.
Parameters
----------
B: float
Parabolic anomaly
Returns
-------
nu: float
True anomaly
Notes
-----
As explained in Vallado's [1], :math:`B` is used instead of :math:`P` just
to not confuse with the orbital parameter.
"""
nu = 2 * np.arctan(B)
return nu
def nu_to_H(nu, ecc):
"""
Retrieves hyperbolic anomaly from true one.
Parameters
----------
nu: float
True anomaly
ecc: float
Eccentricity of the orbit
Returns
-------
H: float
Hyperbolic anomaly
"""
H = 2 * np.arctanh(np.sqrt((ecc - 1) / (ecc + 1)) * np.tan(nu / 2))
return H
def H_to_nu(H, ecc):
"""
    Retrieves true anomaly from hyperbolic one.
Parameters
----------
H: float
Hyperbolic anomaly
ecc: float
Eccentricity of the orbit
Returns
-------
nu: float
True anomaly
"""
nu = 2 * np.arctan(np.sqrt((ecc + 1) / (ecc - 1)) * np.tanh(H / 2))
return nu
```
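A small numerical round trip of the anomaly converters, using the elliptic case from the test suite:
```python
import numpy as np

from lamberthub.utils.angles import E_to_nu, nu_to_E

E, ecc = 0.80521, 0.24649  # elliptic case from Curtis, example 3.3
nu = E_to_nu(E, ecc)
print(np.isclose(nu, 1.00220, rtol=1e-4))  # True
print(np.isclose(nu_to_E(nu, ecc), E))  # round trip recovers E
```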
#### File: lamberthub/utils/misc.py
```python
import numpy as np
from lamberthub.utils.elements import rotation_matrix
def get_solver_name(solver):
"""
Returns solver's name.
Parameters
----------
solver: function
Solver function.
Returns
-------
name: str
String representation for the solver.
"""
# Get the function name and its length
raw_name = str(solver.__name__.capitalize())
len_name = len(raw_name)
# Break into author name and implementation year
author_name = raw_name[0 : (len_name - 4)]
year_name = raw_name[(len_name - 4) :]
# Assemble and return the solver's name
name = author_name + " " + year_name
return name
def _get_sample_vectors_from_theta_and_rho(theta, rho):
"""Returns the initial and final position vectors contained in the reference
XY plane being given the transfer angle and the norm ration between them.
Parameters
----------
theta: float
The transfer angle.
rho: float
The ratio between the norms of the final and initial vectors.
Returns
-------
r1_vec: ~np.array
The initial position vector.
r2_vec: ~np.array
The final position vector.
Notes
-----
The purpose of this function is to easily generate initial data for the
Lambert's problem in the sense of position vectors. The function is used by
the performance plotters and to generate various time of flight curves for
the different available algorithms.
"""
# Generate final position vector by rotating initial one a given theta
r1_vec = np.array([1, 0, 0])
r2_vec = rho * rotation_matrix(theta, axis=2) @ r1_vec
return (r1_vec, r2_vec)
```
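A sketch of the geometry produced by the helper above; the theta and rho values are arbitrary samples:
```python
import numpy as np

from lamberthub.utils.misc import _get_sample_vectors_from_theta_and_rho

r1, r2 = _get_sample_vectors_from_theta_and_rho(np.pi / 2, 2.0)
print(r1)  # [1, 0, 0]
print(r2)  # r1 rotated 90 deg about the Z-axis and scaled by rho, i.e.
           # approximately [0, 2, 0] under a right-handed rotation convention
```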
#### File: lamberthub/tests/test_multirev_solvers.py
```python
import numpy as np
import pytest
from numpy.testing import assert_allclose
from lamberthub import MULTI_REV_SOLVERS
TABLE_OF_TRANSFERS = {
"M1_prograde_high": [
np.array([0.50335770, 0.61869408, -1.57176904]), # [km / s]
np.array([-4.18334626, -1.13262727, 6.13307091]), # [km / s]
],
"M1_prograde_low": [
np.array([-2.45759553, 1.16945801, 0.43161258]), # [km / s]
np.array([-5.53841370, 0.01822220, 5.49641054]), # [km / s]
],
"M1_retrograde_high": [
np.array([1.33645655, -0.94654565, 0.30211211]), # [km / s]
np.array([4.93628678, 0.39863416, -5.61593092]), # [km / s]
],
"M1_retrograde_low": [
np.array([-1.38861608, -0.47836611, 2.21280154]), # [km / s]
np.array([3.92901545, 1.50871943, -6.52926969]), # [km / s]
],
}
"""
Directly taken from example 1 from The Superior Lambert Algorithm (Der
Astrodynamics), by <NAME>, see https://amostech.com/TechnicalPapers/2011/Poster/DER.pdf
"""
@pytest.mark.parametrize("solver", MULTI_REV_SOLVERS)
@pytest.mark.parametrize("case", TABLE_OF_TRANSFERS)
def test_multirev_case(solver, case):
# Initial conditions
mu_earth = 3.986004418e5 # [km ** 3 / s ** 2]
r1 = np.array([22592.145603, -1599.915239, -19783.950506]) # [km]
r2 = np.array([1922.067697, 4054.157051, -8925.727465]) # [km]
tof = 36000 # [s]
# Unpack problem conditions
M, sense, path = case.split("_")
# Convert proper type
M = int(M[-1])
sense = True if sense == "prograde" else False
path = True if path == "low" else False
# Solve the problem
v1, v2 = solver(mu_earth, r1, r2, tof, M=M, prograde=sense, low_path=path)
# Expected final results
expected_v1, expected_v2 = TABLE_OF_TRANSFERS[case]
# Assert the results
assert_allclose(v1, expected_v1, rtol=5e-6)
assert_allclose(v2, expected_v2, rtol=5e-6)
@pytest.mark.parametrize("solver", MULTI_REV_SOLVERS)
def test_exception_try_lower_M(solver):
""" Test that solver does not find any solution for particular input """
# Initial conditions
mu_earth = 3.986004418e5 # [km ** 3 / s ** 2]
r1 = np.array([22592.145603, -1599.915239, -19783.950506]) # [km]
r2 = np.array([1922.067697, 4054.157051, -8925.727465]) # [km]
tof = 5 * 3600 # [s]
with pytest.raises(ValueError) as excinfo:
solver(mu_earth, r1, r2, tof, M=1)
assert "ValueError: No feasible solution, try lower M!" in excinfo.exconly()
```
#### File: tests/tests_plotting/test_base.py
```python
import numpy as np
from numpy.testing import assert_allclose
from lamberthub.p_solvers.gauss import gauss1809
from lamberthub.plotting._base import _measure_performance
def test_measure_performance_handles_iteration_exceptions():
results = _measure_performance(gauss1809, np.pi / 2, 2 * np.pi)
for value in results:
assert_allclose(value, 0.0)
```
#### File: tests/tests_plotting/test_time_plotter.py
```python
import matplotlib.pyplot as plt
import pytest
from lamberthub import gooding1990
from lamberthub.plotting import TPIPlotter, TTCPlotter
@pytest.mark.mpl_image_compare
def test_time_per_iteration_performance_plotter():
fig, ax = plt.subplots()
ipp = TPIPlotter(ax=ax, fig=fig)
ipp.plot_performance(gooding1990, N_samples=3)
return fig
@pytest.mark.mpl_image_compare
def test_total_time_performance_plotter():
fig, ax = plt.subplots()
ttc = TTCPlotter(ax=ax, fig=fig)
ttc.plot_performance(gooding1990, N_samples=3, maxttc=3000)
return fig
```
#### File: tests/tests_universal_solvers/test_izzo.py
```python
import pytest
from lamberthub.universal_solvers import izzo
@pytest.mark.parametrize("M", [1, 2, 3])
def test_minimum_time_of_flight_convergence(M):
ll = -1
x_T_min_expected, T_min_expected = izzo._compute_T_min(
ll, M, maxiter=10, atol=1e-8, rtol=1e-10
)
y = izzo._compute_y(x_T_min_expected, ll)
T_min = izzo._tof_equation_y(x_T_min_expected, y, 0.0, ll, M)
assert T_min_expected == T_min
```
#### File: tests/tests_utils/test_angles.py
```python
import numpy as np
import pytest
from numpy.testing import assert_allclose
from lamberthub.utils.angles import B_to_nu, E_to_nu, H_to_nu, get_transfer_angle
@pytest.mark.parametrize("sense", [True, False])
def test_get_transfer_angle_collinear_vectors_same_sense(sense):
# Build two vectors with the same direction and sense using scalar
# proportion
r1 = np.array([1, 0, 0])
r2 = r1 if sense is True else -r1
# Compute the transfer angle
dtheta = get_transfer_angle(r1, r2, True)
expected_dtheta = 0 if sense is True else np.pi
# Check that transfer angle is zero
assert_allclose(dtheta, expected_dtheta)
TABLE_OF_SOLUTIONS = {
"elliptic": [0.80521, 1.00220, 0.24649], # [E, nu, ecc] from Curtis 3.3
"parabolic": [3.1481, 2.526364092261792, 1], # [B, nu, ecc] from Curtis 3.4
"hyperbolic": [2.2927, 1.7453292519943295, 2.7696], # [H, nu, ecc] from Curtis 3.5
}
@pytest.mark.parametrize("orbit_type", TABLE_OF_SOLUTIONS)
def test_from_orbit_anomaly_to_true_anomaly(orbit_type):
# Unpack expected values
orbit_anomaly, expected_nu, ecc = TABLE_OF_SOLUTIONS[orbit_type]
# Solve for the predicted numerical value
if 0 <= ecc < 1:
nu = E_to_nu(orbit_anomaly, ecc)
elif ecc == 1:
nu = B_to_nu(orbit_anomaly)
else:
nu = H_to_nu(orbit_anomaly, ecc)
# Check values match
assert_allclose(nu, expected_nu, rtol=1e-4)
``` |
{
"source": "jorgepiloto/scikit-aero",
"score": 3
} |
#### File: skaero/atmosphere/util.py
```python
from __future__ import absolute_import, division
import numpy as np
# effective earth's radius
R_Earth = 6356.7660e3 # m
def geometric_to_geopotential(z):
"""Returns geopotential altitude from geometric altitude.
Parameters
----------
z : array_like
Geometric altitude in meters.
Returns
-------
h : array_like
Geopotential altitude in meters.
"""
h = np.asarray(z) * R_Earth / (np.asarray(z) + R_Earth)
return h
def geopotential_to_geometric(h):
"""Returns geometric altitude from geopotential altitude.
Parameters
----------
h : array_like
Geopotential altitude in meters.
Returns
-------
z : array_like
Geometric altitude in meters.
Notes
-----
Based on eq. 19 of the `U.S. 1976 Standard Atmosphere`_.
    .. _`U.S. 1976 Standard Atmosphere`: http://ntrs.nasa.gov/search.jsp?R=19770009539
"""
z = np.asarray(h) * R_Earth / (R_Earth - np.asarray(h))
return z
```
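A quick consistency check of the two conversions; the round trip returns the input altitude:
```python
import numpy as np

from skaero.atmosphere.util import geometric_to_geopotential, geopotential_to_geometric

z = 11000.0  # geometric altitude in meters
h = geometric_to_geopotential(z)
print(h)  # slightly below 11000 m, since h = z * R_Earth / (z + R_Earth)
print(np.isclose(geopotential_to_geometric(h), z))  # True
```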
#### File: skaero/gasdynamics/isentropic.py
```python
from __future__ import absolute_import, division
import numpy as np
import scipy as sp
from scipy.optimize import bisect, newton
from skaero.util.decorators import implicit
def mach_angle(M):
r"""Returns Mach angle given supersonic Mach number.
.. math::
\mu = \arcsin{\left ( \frac{1}{M} \right )}
Parameters
----------
M : float
Mach number.
Returns
-------
mu : float
Mach angle.
Raises
------
ValueError
If given Mach number is subsonic.
"""
try:
with np.errstate(invalid="raise"):
mu = np.arcsin(1 / M)
except FloatingPointError:
raise ValueError("Mach number must be supersonic")
return mu
def mach_from_area_ratio(A_Astar, fl=None):
    """Computes the Mach number given an area ratio assuming isentropic flow.
Uses the relation between Mach number and area ratio for isentropic flow,
and returns both the subsonic and the supersonic solution.
Parameters
----------
    A_Astar : float
        Area ratio, duct cross-sectional area over critical area.
fl : IsentropicFlow, optional
Isentropic flow object, default flow with gamma = 7 / 5.
Returns
-------
out : tuple of floats
Subsonic and supersonic Mach number solution of the equation.
Raises
------
ValueError
If the area ratio is less than 1.0 (the critical area is always the
minimum).
"""
if not fl:
fl = IsentropicFlow(gamma=1.4)
eq = implicit(fl.A_Astar)
if A_Astar < 1.0:
raise ValueError("Area ratio must be greater than 1")
elif A_Astar == 1.0:
M_sub = M_sup = 1.0
else:
M_sub = bisect(eq, 0.0, 1.0, args=(A_Astar,))
M_sup = newton(eq, 2.0, args=(A_Astar,))
return M_sub, M_sup
def mach_from_nu(nu, in_radians=True, gamma=1.4):
r"""Computes the Mach number given a Prandtl-Meyer angle, :math:`\nu`.
Uses the relation between Mach number and Prandtl-Meyer angle for
isentropic flow, to iteratively compute and return the Mach number.
Parameters
----------
nu : float
Prandtl-Meyer angle, by default in radians.
in_radians : bool, optional
When set as False, converts nu from degrees to radians.
gamma : float, optional
Specific heat ratio.
Returns
-------
M : float
Mach number corresponding to :math:`\nu`.
Raises
------
ValueError
        If :math:`\nu` is 0 or negative, or above the theoretical maximum
        based on :math:`\gamma`.
"""
if not in_radians:
nu = np.radians(nu)
nu_max = np.pi / 2.0 * (np.sqrt((gamma + 1.0) / (gamma - 1.0)) - 1)
if nu <= 0.0 or nu >= nu_max:
raise ValueError(
"Prandtl-Meyer angle must be between (0, %f) radians." % nu_max
)
eq = implicit(PrandtlMeyerExpansion.nu)
M = newton(eq, 2.0, args=(nu,))
return M
class IsentropicFlow(object):
"""Class representing an isentropic gas flow.
Isentropic flow is characterized by:
* Viscous and heat conductivity effects are negligible.
* No chemical or radioactive heat production.
"""
def __init__(self, gamma=1.4):
"""Constructor of IsentropicFlow.
Parameters
----------
gamma : float, optional
Specific heat ratio, default 7 / 5.
"""
self.gamma = gamma
def p_p0(self, M):
r"""Pressure ratio from Mach number.
.. math::
\left ( \frac{P}{P_{0}} \right ) = \left ( \frac{T}{T_{0}} \right )^{\frac{\gamma}{(\gamma - 1)}}
Parameters
----------
M : array_like
Mach number.
Returns
-------
p_p0 : array_like
Pressure ratio.
"""
M = np.asanyarray(M)
p_p0 = self.T_T0(M) ** (self.gamma / (self.gamma - 1))
return p_p0
def rho_rho0(self, M):
r"""Density ratio from Mach number.
.. math::
\left ( \frac{\rho}{\rho_{0}} \right ) = \left ( \frac{T}{T_{0}} \right )^{\frac{1}{(\gamma - 1)}}
Parameters
----------
M : array_like
Mach number.
Returns
-------
rho_rho0 : array_like
Density ratio.
"""
M = np.asanyarray(M)
rho_rho0 = self.T_T0(M) ** (1 / (self.gamma - 1))
return rho_rho0
def T_T0(self, M):
r"""Temperature ratio from Mach number.
.. math::
\left ( \frac{T}{T_{0}} \right ) = \left (1 + \frac{\gamma - 1}{2}M^{2} \right )^{-1}
Parameters
----------
M : array_like
Mach number.
Returns
-------
T_T0 : array_like
Temperature ratio.
"""
M = np.asanyarray(M)
T_T0 = 1 / (1 + (self.gamma - 1) * M * M / 2)
return T_T0
def A_Astar(self, M):
"""Area ratio from Mach number.
    Duct area divided by critical area given Mach number.
Parameters
----------
M : array_like
Mach number.
Returns
-------
A_Astar : array_like
Area ratio.
"""
M = np.asanyarray(M)
# If there is any zero entry, NumPy array division gives infinity,
# which is correct.
with np.errstate(divide="ignore"):
A_Astar = (2 / self.T_T0(M) / (self.gamma + 1)) ** (
(self.gamma + 1) / (2 * (self.gamma - 1))
) / M
return A_Astar
def a_a0(self, M):
""" Speed of sound ratio from Mach number.
Parameters
----------
M: array_like
Mach number.
Returns
-------
a_a0: array_like
Speed of sound ratio.
"""
M = np.asarray(M)
a_a0 = self.T_T0(M) ** 0.5
return a_a0
class PrandtlMeyerExpansion(object):
"""Class representing a Prandtl-Meyer expansion fan.
"""
@staticmethod
def nu(M, gamma=1.4):
r"""Prandtl-Meyer angle for a given Mach number.
The result is given by evaluating the Prandtl-Meyer function.
.. math::
\nu = \sqrt{\frac{\gamma + 1}{\gamma - 1}} \tan^{-1}\left [ \sqrt{\frac{\gamma - 1}{\gamma + 1}(M^{2} - 1)} \right ] - \tan^{-1}(\sqrt{M^{2} - 1})
Parameters
----------
M : float
Mach number.
gamma : float, optional
Specific heat ratio, default 7 / 5.
Returns
-------
nu : float
Prandtl-Meyer angle, in radians.
Raises
------
ValueError
If Mach number is subsonic.
"""
try:
with np.errstate(invalid="raise"):
sgpgm = np.sqrt((gamma + 1) / (gamma - 1))
nu = sgpgm * np.arctan(np.sqrt(M * M - 1) / sgpgm) - np.arctan(
np.sqrt(M * M - 1)
)
except FloatingPointError:
raise ValueError("Mach number must be supersonic")
return nu
def __init__(self, M_1, theta, fl=None, gamma=1.4):
"""Constructor of PrandtlMeyerExpansion.
Parameters
----------
M_1 : float
Upstream Mach number.
theta : float
Deflection angle, in radians.
fl : IsentropicFlow, optional.
Flow to be expanded
gamma : float, optional
Specific heat ratio, default value = 7 / 5.
Raises
------
ValueError
If given Mach number is subsonic.
"""
if not fl:
fl = IsentropicFlow(gamma=gamma)
nu_max = PrandtlMeyerExpansion.nu(np.inf, fl.gamma) - PrandtlMeyerExpansion.nu(
M_1, fl.gamma
)
if theta > nu_max:
raise ValueError(
"Deflection angle must be lower than maximum {:.2f}°".format(
np.degrees(nu_max)
)
)
self.M_1 = M_1
self.theta = theta
self.fl = fl
@property
def nu_1(self):
"""Upstream Prandtl-Meyer angle."""
return PrandtlMeyerExpansion.nu(self.M_1, self.fl.gamma)
@property
def nu_2(self):
"""Downstream Prandtl-Meyer angle."""
return self.nu_1 + self.theta
@property
def M_2(self):
"""Downstream Mach number.
"""
return mach_from_nu(nu=self.nu_2, gamma=self.fl.gamma)
@property
def mu_1(self):
"""Angle of forward Mach line.
"""
return mach_angle(self.M_1)
@property
def mu_2(self):
"""Angle of rearward Mach line.
"""
return mach_angle(self.M_2)
@property
def p2_p1(self):
"""Pressure ratio across the expansion fan.
"""
p2_p1 = self.fl.p_p0(self.M_2) / self.fl.p_p0(self.M_1)
return p2_p1
@property
def T2_T1(self):
"""Temperature ratio across the expansion fan.
"""
T2_T1 = self.fl.T_T0(self.M_2) / self.fl.T_T0(self.M_1)
return T2_T1
@property
def rho2_rho1(self):
"""Density ratio across the expansion fan.
"""
return self.p2_p1 / self.T2_T1
``` |
{
"source": "jorgepvasconcelos/scraping-the-world",
"score": 2
} |
#### File: scraping_the_world/models/querys.py
```python
import traceback
from scraping_the_world.models.utils import DataBase
def add_log(log_text: str, log_type: str) -> None:
try:
DataBase.execute(query="INSERT INTO logs (log_text, log_type) VALUE (%s, %s)", arguments=[log_text, log_type])
except Exception:
traceback.print_exc()
def get_config(name: str):
value = DataBase.consult_one(query="select value from configs where name = %s; ", arguments=[name])
value = value['value']
return value
```
#### File: scraping_the_world/scrapers/americanas.py
```python
import traceback
from selenium.webdriver.common.by import By
from requests_html import HTMLSession
from parsel import Selector
from scraping_the_world.models.querys import add_log, get_config
from scraping_the_world.scrapers.webdriver_manager.webdriver_manager import WebdriverManager
from scraping_the_world.exceptions.scrapers_exceptions import SiteWhithoutDataError, PageNotFound404Error
class ScrapingAmericanas:
def __init__(self, url):
self.__url = url
self.__site_data = {
'titulo': None, 'imagem': None, 'preco': None, 'descricao': None, 'url': None, 'error': False}
def consult(self):
scraping_type = int(get_config('scraping_americanas'))
webdriver_manager = None
try:
if scraping_type == 0:
webdriver_manager = WebdriverManager()
webdriver_manager.create_driver()
self.__scraping_selenium()
elif scraping_type == 1:
self.__scraping_requests()
except PageNotFound404Error as error:
self.__site_data['error'] = error
except SiteWhithoutDataError as error:
self.__site_data['error'] = error
except Exception as error:
add_log(log_text=f'[scraping_americanas] Traceback: {error}', log_type='ERROR')
self.__site_data['error'] = error
finally:
if webdriver_manager:
webdriver_manager.driver_quit()
return self.__site_data
def __scraping_requests(self):
session = HTMLSession()
response = session.get(self.__url).text
parsel_selector = Selector(text=response)
selector = '.product-title__Title-sc-1hlrxcw-0::text'
self.__site_data['titulo'] = parsel_selector.css(selector).get()
selector = 'div[class="main-image__Container-sc-1i1hq2n-1 iCNHlx"]>div>picture>img::attr(src)'
self.__site_data['imagem'] = parsel_selector.css(selector).get()
selector = '.priceSales::text'
self.__site_data['preco'] = parsel_selector.css(selector).get()
selector = '.product-description__Description-sc-ytj6zc-1::text'
descricao = parsel_selector.css(selector).get()
self.__site_data['descricao'] = descricao if descricao else 'No Description'
selector = 'head>[property="al:web:url"]::attr(content)'
self.__site_data['url'] = parsel_selector.css(selector).get()
return self.__site_data
def __scraping_selenium(self):
driver, wdtk = WebdriverManager().get_driver()
driver.get(self.__url)
selector = '.product-title__Title-sc-1hlrxcw-0'
if wdtk.element_is_present(wait_time=10, locator=(By.CSS_SELECTOR, selector)):
self.__site_data['titulo'] = driver.find_element(By.CSS_SELECTOR, selector).text
else:
raise SiteWhithoutDataError()
selector = 'div[class="main-image__Container-sc-1i1hq2n-1 iCNHlx"]>div>picture>img'
if wdtk.element_is_present(wait_time=10, locator=(By.CSS_SELECTOR, selector)):
self.__site_data['imagem'] = driver.find_element(By.CSS_SELECTOR, selector).get_attribute('src')
else:
raise SiteWhithoutDataError()
selector = '.priceSales'
if wdtk.element_is_present(wait_time=10, locator=(By.CSS_SELECTOR, selector)):
self.__site_data['preco'] = driver.find_element(By.CSS_SELECTOR, selector).text
else:
raise SiteWhithoutDataError()
selector = '.product-description__Description-sc-ytj6zc-1'
if wdtk.element_is_present(wait_time=10, locator=(By.CSS_SELECTOR, selector)):
descricao = driver.find_element(By.CSS_SELECTOR, selector).text
self.__site_data['descricao'] = descricao if descricao else 'No Description'
else:
self.__site_data['descricao'] = 'No Description'
self.__site_data['url'] = driver.current_url
return self.__site_data
if __name__ == '__main__':
...
scraping_result = ScrapingAmericanas('https://www.americanas.com.br/produto/3068486001').consult() # no description
# scraping_result = scraping_americanas('https://www.americanas.com.br/produto/2896992161') # with description
print(scraping_result)
```
#### File: scrapers/webdriver_manager/webdriver_manager.py
```python
import os
import traceback
from selenium import webdriver
from selenium_stealth import stealth
from scraping_the_world.scrapers.webdriver_manager.webdriver_toolkit import WebDriverToolKit
from scraping_the_world.models.querys import add_log
from env import ENV
WEBDRIVERS_PATH = os.path.dirname(os.path.realpath(__file__))
class SingletonMeta(type):
"""
The Singleton class can be implemented in different ways in Python. Some
possible methods include: base class, decorator, metaclass. We will use the
metaclass because it is best suited for this purpose.
"""
_instances = {}
def __call__(cls, *args, **kwargs):
"""
Possible changes to the value of the `__init__` argument do not affect
the returned instance.
"""
if cls not in cls._instances:
instance = super().__call__(*args, **kwargs)
cls._instances[cls] = instance
return cls._instances[cls]
class WebdriverManager(metaclass=SingletonMeta):
def __init__(self):
self.__driver = None
def get_driver(self):
if not self.__driver:
self.__driver = self.create_driver()
wdtk = WebDriverToolKit(self.__driver)
return self.__driver, wdtk
def create_driver(self):
try:
options = self.__get_options()
if ENV['ENV'] == 'DEV':
if int(ENV['SELENIUM_REMOTE']) == 1:
self.__driver = webdriver.Remote(command_executor='http://localhost:4444/wd/hub', options=options)
else:
webdriver_path = f'{WEBDRIVERS_PATH}\\chromedriver'
self.__driver = webdriver.Chrome(executable_path=webdriver_path, options=options)
stealth(driver=self.__driver,
languages=["en-US", "en"],
vendor="Google Inc.",
platform="Win32",
webgl_vendor="Intel Inc.",
renderer="Intel Iris OpenGL Engine",
fix_hairline=True,
)
else:
self.__driver = webdriver.Remote(command_executor='http://container_selenium:4444/wd/hub', options=options)
self.__driver.maximize_window()
return self.__driver
except:
traceback.print_exc()
add_log(log_text=f'[WebdriverManager] Traceback: {traceback.format_exc()}', log_type='ERROR')
@staticmethod
def __get_options():
options = webdriver.ChromeOptions()
options.add_argument('--ignore-ssl-errors=yes')
options.add_argument('--ignore-certificate-errors')
options.add_argument('--incognito')
options.add_argument('--no-sandbox')
options.add_argument('--disable-blink-features=AutomationControlled')
options.add_argument('--enable-automation')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--disable-extensions')
options.add_argument('--disable-gpu')
options.add_experimental_option('useAutomationExtension', False)
options.add_experimental_option("excludeSwitches", ["enable-automation"])
# options.add_argument(
# '--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.69 Safari/537.36')
# options.add_argument('--headless')
return options
def driver_quit(self):
try:
if self.__driver:
self.__driver.quit()
self.__driver = None
except:
traceback.print_exc()
add_log(log_text=f'[WebdriverManager] Traceback: {traceback.format_exc()}', log_type='ERROR')
```
#### File: tests/helpers/helper.py
```python
def response_is_not_empty(data_dict: dict) -> bool:
for value in data_dict.values():
if value is None:
return False
return True
``` |
{
"source": "jorgeramirezcarrasco/catastro-finder",
"score": 3
} |
#### File: jorgeramirezcarrasco/catastro-finder/catastro_finder.py
```python
import requests
import json
import re
from bs4 import BeautifulSoup
class CatastroFinder:
"""CatastroFinder"""
def __init__(self,catastro_dict_path=None):
"""
Args:
catastro_dict_path (str, optional): Json file with catastro urls to scrap. Defaults to "./catastro_artifacts.json".
"""
if catastro_dict_path:
with open(catastro_dict_path) as json_file:
self.catastro_dict=json.load(json_file)
else:
self.catastro_dict={
"provincias": {
"url": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx/ObtenerProvincias",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"accept": "application/json, text/javascript, */*; q=0.01",
"x-requested-with": "XMLHttpRequest",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"content-type": "application/json; charset=UTF-8",
"origin": "https://www1.sedecatastro.gob.es",
"sec-fetch-site": "same-origin",
"sec-fetch-mode": "cors",
"sec-fetch-dest": "empty",
"referer": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx?from=NuevoVisor",
"accept-language": "es-ES,es;q=0.9"
}
},
"municipios": {
"url": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx/ObtenerMunicipios",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"accept": "application/json, text/javascript, */*; q=0.01",
"x-requested-with": "XMLHttpRequest",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"content-type": "application/json; charset=UTF-8",
"origin": "https://www1.sedecatastro.gob.es",
"sec-fetch-site": "same-origin",
"sec-fetch-mode": "cors",
"sec-fetch-dest": "empty",
"referer": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx?from=NuevoVisor",
"accept-language": "es-ES,es;q=0.9"
}
},
"vias": {
"url": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx/ObtenerVias",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"accept": "application/json, text/javascript, */*; q=0.01",
"x-requested-with": "XMLHttpRequest",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"content-type": "application/json; charset=UTF-8",
"origin": "https://www1.sedecatastro.gob.es",
"sec-fetch-site": "same-origin",
"sec-fetch-mode": "cors",
"sec-fetch-dest": "empty",
"referer": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCBusqueda.aspx?from=NuevoVisor",
"accept-language": "es-ES,es;q=0.9"
}
},
"inmuebles": {
"url": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCListaBienes.aspx",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"cache-control": "max-age=0",
"upgrade-insecure-requests": "1",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"sec-fetch-site": "none",
"sec-fetch-mode": "navigate",
"sec-fetch-user": "?1",
"sec-fetch-dest": "document",
"accept-language": "es-ES,es;q=0.9"
}
},
"cp": {
"url": "https://www1.sedecatastro.gob.es/CYCBienInmueble/OVCConCiud.aspx",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"cache-control": "max-age=0",
"upgrade-insecure-requests": "1",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"sec-fetch-site": "none",
"sec-fetch-mode": "navigate",
"sec-fetch-user": "?1",
"sec-fetch-dest": "document",
"accept-language": "es-ES,es;q=0.9"
}
},
"lat_long": {
"url": "https://www1.sedecatastro.gob.es/Cartografia/BuscarParcelaInternet.aspx",
"headers": {
"authority": "www1.sedecatastro.gob.es",
"cache-control": "max-age=0",
"upgrade-insecure-requests": "1",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.198 Safari/537.36",
"sec-fetch-site": "none",
"sec-fetch-mode": "navigate",
'sec-fetch-site': 'same-origin',
"sec-fetch-user": "?1",
"sec-fetch-dest": "document",
"accept-language": "es-ES,es;q=0.9"
}
}
}
def get_provincias(self,filtro=""):
"""get_provincias
Args:
filtro (str, optional): Filtro. Defaults to "".
Returns:
(list): List of items with Codigo and Denominacion. [{'Codigo': 15, 'Denominacion': 'A CORUÑA'}, ...]
"""
url=self.catastro_dict["provincias"]["url"]
headers=self.catastro_dict["provincias"]["headers"]
payload = "{ 'filtro': '"+filtro+"'}"
response = requests.request("POST", url, headers=headers, data = payload)
return json.loads(response.content)['d']
def get_municipios(self,provincia):
"""get_municipios
Args:
provincia (str): Provincia code to search.
Returns:
(list): List of items with Codigo and Denominacion. [{'Codigo': 121, 'Denominacion': 'SANTA POLA'}, ...]
"""
url=self.catastro_dict["municipios"]["url"]
headers=self.catastro_dict["municipios"]["headers"]
payload = "{\"filtro\":\"\",\"provincia\":"+str(provincia)+"}"
response = requests.request("POST", url, headers=headers, data = payload)
return json.loads(response.content)['d']
def get_vias(self,provincia,municipio,input_via):
"""get_vias
Args:
provincia (str): Provincia code to search.
municipio (str): Municipio code to search.
input_via (str): Via input to search.
Returns:
(list): List of items with Codigo, Sigla, TipoVia, DenominacionCompleta and Denominacion. {'Codigo': 1212, 'Sigla': 'CL', 'TipoVia': 'CALLE', 'Denominacion': 'SANTA CRISTINA', 'DenominacionCompleta': 'SANTA CRISTINA (CALLE)'}
"""
url=self.catastro_dict["vias"]["url"]
headers=self.catastro_dict["vias"]["headers"]
payload = "{\"filtro\":\""+str(input_via)+"\",\"provincia\":"+str(provincia)+",\"municipio\":"+str(municipio)+"}"
response = requests.request("POST", url, headers=headers, data = payload)
return json.loads(response.content)['d']
def search_inmueble(self,via_result,via_numero,selected_provincia,selected_municipio,tipur="U",pest="urbana"):
"""search inmueble
Args:
via_result (dict): Via item as returned by get_vias.
via_numero (str): Street number to search.
selected_provincia (dict): Provincia item as returned by get_provincias.
selected_municipio (dict): Municipio item as returned by get_municipios.
tipur (str, optional): tipur. Defaults to "U".
pest (str, optional): pest. Defaults to "urbana".
Returns:
(list): List of inmuebles
"""
url=self.catastro_dict["inmuebles"]["url"]
headers=self.catastro_dict["inmuebles"]["headers"]
via = via_result['Denominacion'].replace(" ","@")
params = (
('via', str(via)),
('tipoVia', str(via_result['Sigla'])),
('numero', str(via_numero)),
('kilometro', ''),
('bloque', ''),
('escalera', ''),
('planta', ''),
('puerta', ''),
('DescProv', str(selected_provincia['Denominacion'])),
('prov', str(selected_provincia['Codigo'])),
('muni', str(selected_municipio['Codigo'])),
('DescMuni', str(selected_municipio['Denominacion'])),
('TipUR', str(tipur)),
('codvia', str(via_result['Codigo'])),
('comVia', str(via_result['DenominacionCompleta'])),
('pest', str(pest)),
('from', 'OVCBusqueda'),
('nomusu', ' '),
('tipousu', ''),
('ZV', 'NO'),
('ZR', 'NO'),
)
response = requests.get(url, headers=headers, params=params)
soup = BeautifulSoup(response.content,features="html.parser")
inmueble_results = soup.find_all("div", "panel-heading")
cleaned_results = []
for inmueble in inmueble_results:
results_item = {}
for element in inmueble.find_all("span"):
if "title" in element.attrs:
if element.attrs["title"] == "Localización":
results_item["Localización"] = element.text
results_item["RefC"] = element.parent.parent.find("b").text.replace(" ","")
if element.attrs["title"] == "Año construcción":
results_item["Año construcción"] = element.text.replace(" ","")
if element.attrs["title"] == "Uso":
results_item["Uso"] = element.text
if element.attrs["title"] == "Coeficiente de participación":
results_item["Coeficiente de participación"] = element.text.replace(" ","")
if element.attrs["title"] == "Superficie construida":
results_item["Superficie construida"] = element.text.replace(" ","")
if results_item:
cleaned_results.append(results_item)
return cleaned_results
def get_cp(self,provincia,municipio,rc,urbrus="U"):
"""get_cp
Args:
provincia (str): Provincia code to search.
municipio (str): Municipio code to search.
rc (str): Ref catastral to search.
urbrus (str, optional): urbrus. Defaults to "U".
Returns:
(str): Postal Code
"""
url=self.catastro_dict["cp"]["url"]
headers=self.catastro_dict["cp"]["headers"]
params = (
('del', str(provincia)),
('mun', str(municipio)),
('UrbRus', str(urbrus)),
('RefC', str(rc)),
('Apenom', ''),
('esBice', ''),
('RCBice1', ''),
('RCBice2', ''),
('DenoBice', ''),
('from', 'nuevoVisor'),
('ZV', 'NO'),
)
response = requests.get(url, headers=headers, params=params)
soup = BeautifulSoup(response.content,features="html.parser")
cp = re.search(r"\d{5}", soup.find_all("span", "control-label black")[1].get_text(strip=True, separator=" "))[0]
return cp
def get_lat_lon(self, rc):
"""get_lat_lon
Args:
rc (str): Ref catastral to search.
Returns:
(dict): dict with lat and lng
"""
url=self.catastro_dict["lat_long"]["url"]
headers=self.catastro_dict["lat_long"]["headers"]
params = (
('refcat', str(rc)),
)
response = requests.get(url, headers=headers, params=params)
soup = BeautifulSoup(response.content,features="html.parser")
data_form_list = [inp for inp in soup.find_all("input") if 'class' in inp.parent.attrs and 'aspNetHidden' in inp.parent["class"]]
data_form_dict = {}
for data_form in data_form_list:
data_form_dict[data_form.attrs['name']] = data_form.attrs['value']
url=self.catastro_dict["lat_long"]["url"]
headers=self.catastro_dict["lat_long"]["headers"]
params = (
('refcat', str(rc)),
)
data = {
'__VIEWSTATE': data_form_dict['__VIEWSTATE'],
'__VIEWSTATEGENERATOR': data_form_dict['__VIEWSTATEGENERATOR'],
'__EVENTVALIDATION': data_form_dict['__EVENTVALIDATION'],
'ctl00$Contenido$RefCat': str(rc),
'ctl00$Contenido$ImgBGoogleMaps.x': '0',
'ctl00$Contenido$ImgBGoogleMaps.y': '0'
}
response = requests.post(url, headers=headers, params=params, data=data)
soup = BeautifulSoup(response.content,features="html.parser")
lat_long = str(soup.find_all("span", {"id": "ctl00_Contenido_lblAbrirVentana"})[0].find("script")).split("&q=")[-1].split("(")[0].split(",")
return (lat_long[0],lat_long[1])
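# Minimal usage sketch (added; the search terms are illustrative and the
# Sede Catastro endpoints must be reachable):
# finder = CatastroFinder()
# provincia = finder.get_provincias()[0]
# municipio = finder.get_municipios(provincia['Codigo'])[0]
# via = finder.get_vias(provincia['Codigo'], municipio['Codigo'], 'MAYOR')[0]
# inmuebles = finder.search_inmueble(via, 1, provincia, municipio)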
``` |
{
"source": "jorgercosta/DiscordLandingJobsBot",
"score": 3
} |
#### File: jorgercosta/DiscordLandingJobsBot/bot.py
```python
import os
import discord
import requests
from time import sleep
from dotenv import load_dotenv, set_key
load_dotenv()
TOKEN = os.getenv('TOKEN')
CHANNELID = int(os.getenv('CHANNELID'))
TAGS = os.getenv('TAGS').split(',')
URL = os.getenv('URL')
LASTPUBLISHEDID = int(os.getenv('LASTPUBLISHEDID'))
FETCHINTERVAL = int(os.getenv('FETCHINTERVAL'))
class DiscordClient(discord.Client):
token = ""
channel_id = 0
tags = []
url = ""
last_published_id = 0
fetch_interval = 0
def __init__(self, token: str, channel_id: int, tags: list, url: str, last_published_id: int, fetch_interval: int):
super().__init__()
self.token = token
self.channel_id = channel_id
self.tags = tags
self.url = url
self.last_published_id = last_published_id
self.fetch_interval = fetch_interval
async def on_ready(self):
print('Hello, I am the bot for Landing Jobs', self.user)
while True:
jobs = Jobs(self.url, self.tags, self.last_published_id)
#print('Found new ' + str(len(jobs)) + 'jobs!')
for job in jobs.get():
print('Sending message for job id:' + str(job['id']))
await self.sendMessage(job)
if job['id'] > self.last_published_id:
self.last_published_id = job['id']
self.__persistLastPublishedId(self.last_published_id)
sleep(self.fetch_interval)
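# Note (added): time.sleep blocks discord.py's asyncio event loop for the
# whole interval; await asyncio.sleep(self.fetch_interval) would yield
# control to other coroutines instead.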
async def sendMessage(self, job: dict):
channel = self.get_channel(self.channel_id)
await channel.send(job['url'])
def __persistLastPublishedId(self, id: int):
print('Persisting LASTPUBLISHEDID=' + str(id))
set_key(os.path.join(os.path.dirname(os.path.realpath(
__file__)), '.env'), 'LASTPUBLISHEDID', str(id))
class Jobs():
tags = []
url = ""
last_published_id = 0
def __init__(self, url: str, tags: list, last_published_id: int):
self.tags = tags
self.url = url
self.last_published_id = last_published_id
def get(self) -> list:
limit = 50
offset = 0
results = []
for offset in range(0, 1000, limit):
url = self.url + '?limit=' + str(limit) + '&offset=' + str(offset)
print(url)
r = requests.get(url)
page = r.json()
if len(page) == 0:
break
results = results + page
results = list(
filter(lambda j: self.__filterByTags(j['tags']), results))
results = list(
filter(lambda j: self.__filterUnPublished(j['id']), results))
return results
def __filterByTags(self, tags: list) -> bool:
for tag in self.tags:
if (tag in tags):
return True
return False
def __filterUnPublished(self, id: int) -> bool:
return id > self.last_published_id
client = DiscordClient(TOKEN, CHANNELID, TAGS, URL,
LASTPUBLISHEDID, FETCHINTERVAL)
client.run(TOKEN)
``` |
{
"source": "jorgeriesco/project",
"score": 2
} |
#### File: project/base/cron.py
```python
from django.contrib.sessions.management.commands import clearsessions
# django cron
from django_cron import CronJobBase
from django_cron import CronJobManager
from django_cron import Schedule
class BaseCronJob(CronJobBase):
@classmethod
def release(cls):
silent = False
with CronJobManager(cls, silent) as manager:
lock = manager.lock_class(cls, manager.silent)
lock.release()
class ClearSessionsCronJob(CronJobBase):
RUN_AT_TIMES = ('03:00',)
schedule = Schedule(
run_at_times=RUN_AT_TIMES,
)
code = 'base.ClearSessionsCronJob'
def do(self):
clearsessions.Command().handle()
``` |
{
"source": "JorgeRinconPerez/EOI",
"score": 2
} |
#### File: JorgeRinconPerez/EOI/setup.py
```python
import io
from os.path import dirname
from os.path import join
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
try:
with io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
) as fh:
return fh.read()
except OSError:
return ''
setup(
name='code',
version='0.1.0',
license='Private',
description='A data science project package',
long_description=read('README.md'),
packages=find_packages('code'),
package_dir={'': 'code'}
)
``` |
{
"source": "jorgermurillo/powerapi",
"score": 2
} |
#### File: powerapi/cli/config_validator.py
```python
from typing import Dict
import logging
class ConfigValidator:
@staticmethod
def validate(config: Dict):
if 'verbose' not in config:
config['verbose'] = logging.NOTSET
if 'stream' not in config:
config['stream'] = False
if 'output' not in config:
logging.error("no output configuration found")
return False
for output_type in config['output']:
output_config = config['output'][output_type]
if 'model' not in output_config:
output_config['model'] = 'HWPCReport'
if 'name' not in output_config:
output_config['name'] = 'default_pusher'
if 'input' not in config:
logging.error("no input configuration found")
return False
for input_type in config['input']:
input_config = config['input'][input_type]
if 'model' not in input_config:
input_config['model'] = 'PowerReport'
if 'name' not in input_config:
input_config['name'] = 'default_puller'
return True
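# Illustrative example (added) of a minimal config dict that validate()
# accepts; the 'socket' and 'influxdb' keys are placeholder names:
# {
#     'input': {'socket': {'model': 'HWPCReport', 'name': 'puller'}},
#     'output': {'influxdb': {'model': 'PowerReport', 'name': 'pusher'}},
# }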
```
#### File: powerapi/database/influxdb2.py
```python
import logging
try:
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
#from influxdb import InfluxDBClient
from requests.exceptions import ConnectionError
except ImportError:
logging.getLogger().info("influx_client is not installed.")
from typing import List
from powerapi.database import BaseDB, DBError
from powerapi.report import Report
from powerapi.report_model import ReportModel
class CantConnectToInfluxDB2Exception(DBError):
pass
class InfluxDB2(BaseDB):
"""
MongoDB class herited from BaseDB
Allow to handle a InfluxDB database in reading or writing.
"""
def __init__(self, uri: str, port: int, token: str, org: str, bucket: str):
"""
:param str uri: URI of the InfluxDB server
:param int port: port of the InfluxDB server
:param str token: access token needed to connect to the InfluxDB instance
:param str org: organization that holds the data
:param str bucket: bucket where the data is going to be stored
"""
BaseDB.__init__(self)
self.uri = uri
self.port = port
self.complete_url = "http://%s:%s" % (self.uri, str(self.port))
#self.db_name = db_name
self.token=token
self.org = org
self.org_id = None
self.bucket = bucket
self.client = None
self.write_api= None
def _ping_client(self):
if hasattr(self.client, 'health'):
self.client.health()
else:
self.client.request(url="ping", method='GET', expected_response_code=204)
def connect(self):
"""
Override from BaseDB.
Create the connection to the influxdb database with the current
configuration (hostname/port/db_name), then check if the connection has
been created without failure.
"""
# close connection if reload
if self.client is not None:
self.client.close()
self.client = InfluxDBClient(url=self.complete_url, token=self.token, org=self.org)
#self.client = InfluxDBClient(host=self.uri, port=self.port, database=self.db_name)
# retrieve the org_id
org_api = self.client.organizations_api()
for org_response in org_api.find_organizations():
if org_response.name==self.org:
self.org_id=org_response.id
self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
try:
self._ping_client()
except ConnectionError:
raise CantConnectToInfluxDB2Exception('connexion error')
# Not sure we need to keep the buckeapi object longer than this
bucket_api= self.client.buckets_api()
if bucket_api.find_bucket_by_name(self.bucket) is None:
# If we can't find the bucket, we create it.
bucket_api.create_bucket(bucket_name=self.bucket, org_id=self.org_id)
# We need the org_id in order to create a bucket
#bucket_api.create_database(self.db_name, org_id="")
# TO DO
def save(self, report: Report, report_model: ReportModel):
"""
Override from BaseDB
:param report: Report to save
:param report_model: ReportModel
"""
## Let's print the data to see its schema.
#print("printing report")
#print(report)
#print("Printing serialized report")
#print(report.serialize())
data = report_model.to_influxdb(report.serialize())
self.write_api.write(bucket=self.bucket, record=data)
#self.client.write_points([data])
# TO DO
def save_many(self, reports: List[Report], report_model: ReportModel):
"""
Save a batch of data
:param reports: Batch of data.
:param report_model: ReportModel
"""
data_list = list(map(lambda r: report_model.to_influxdb(r.serialize()), reports))
self.write_api.write(bucket=self.bucket, record=data_list)
```
#### File: test_utils/db/influx.py
```python
import datetime
import pytest
from influxdb import InfluxDBClient
from ..report.power import SENSOR_NAME, TARGET_NAME
INFLUX_URI = 'localhost'
INFLUX_PORT = 8086
INFLUX_DBNAME = 'acceptation_test'
@pytest.fixture()
def influx_database():
client = create_empty_db(INFLUX_URI, INFLUX_PORT)
delete_db(client, INFLUX_DBNAME)
yield client
delete_db(client, INFLUX_DBNAME)
@pytest.fixture()
def influx_database_with_one_power_report():
client = create_non_empty_db(INFLUX_URI, INFLUX_PORT, INFLUX_DBNAME, 1,
SENSOR_NAME, TARGET_NAME)
yield client
delete_db(client, INFLUX_DBNAME)
def generate_power_report(sensor, target, timestamp):
""" generate a power report with json format
"""
return {
'measurement': 'power_consumption',
'tags': {'sensor': sensor,
'target': target},
'time': str(datetime.datetime.fromtimestamp(timestamp)),
'fields': {
'power': 100
}
}
def create_empty_db(url, port):
client = InfluxDBClient(host=url, port=port)
client.ping()
return client
def create_non_empty_db(url, port, db_name, number_of_reports, sensor_name, target_name):
client = create_empty_db(url, port)
client.create_database(db_name)
client.switch_database(db_name)
for i in range(number_of_reports):
client.write_points([generate_power_report(sensor_name, target_name, i)])
return client
def delete_db(client, db_name):
client.drop_database(db_name)
client.close()
def get_all_reports(client, db_name):
client.switch_database(db_name)
result = client.query('SELECT * FROM "power_consumption"')
return list(result.get_points())
``` |
{
"source": "jorgerodriguesdev/appcursossimplesdjango",
"score": 2
} |
#### File: appcursossimplesdjango/core/tests.py
```python
from django.core import mail
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
class HomeViewTest(TestCase):
def test_home_status_code(self):
client = Client()
response = client.get(reverse('core:home'))
self.assertEqual(response.status_code, 200)
def test_home_template_used(self):
client = Client()
response = client.get(reverse('core:home'))
self.assertTemplateUsed(response, 'home.html')
self.assertTemplateUsed(response, 'base.html')
``` |
{
"source": "Jorge-Rodriguez/aiven-monitor-homework",
"score": 2
} |
#### File: aiven-monitor-homework/url_monitor/monitor.py
```python
import json
import logging
import re
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from time import sleep
from confluent_kafka import KafkaException, Producer
from requests import get
from requests.exceptions import RequestException, Timeout
from schema import And, Optional, Schema, Use
from url_monitor.interfaces import Runnable
logging.basicConfig(level=logging.INFO)
class JSONDatetimeEncoder(json.JSONEncoder):
"""JSON encoder with datetime serialization capabilities.
Serializes `datetime.datetime` types as their `isoformat` representation
and `datetime.timedelta` types as their `total_seconds` representation.
"""
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, timedelta):
return obj.total_seconds()
return super(JSONDatetimeEncoder, self).default(obj)
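# Example (added): json.dumps({"d": timedelta(seconds=2)}, cls=JSONDatetimeEncoder)
# yields '{"d": 2.0}', and datetime values serialize to ISO-8601 strings.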
class Monitor(Runnable):
"""URL monitor runnable.
Encapsulates the url monitor execution loop.
Args:
arguments (dict): The configuration dictionary as specified by `CONFIG_SCHEMA`.
Attributes:
config (list): A list of dictionaries specifying the target URLs to monitor,
the monitoring frequencies and the optional regular expressions
to look for on the monitored URL response. The dictionary format
is specified by `CONFIG_SCHEMA`.
producer(confluent_kafka.Producer): A Kafka Producer object.
topic(str): The name of the Kafka topic to send messages to.
"""
CONFIG_SCHEMA = Schema(
{
"kafka": {"connection": dict, "topic": str},
"targets": [
{
"url": str,
"frequency": And(
Use(int), lambda n: n > 0, error="frequency can't be < 1"
),
Optional("regex"): str,
}
],
}
)
RUNNING = True # Busy loop check value
def __init__(self, arguments):
self.logger = logging.getLogger("Monitor")
self.config = arguments["targets"]
self.producer = Producer(**arguments["kafka"]["connection"])
self.topic = arguments["kafka"]["topic"]
def run(self):
"""Main execution scheduler.
A thread pool handles concurrent monitoring of the targets specified in
the `config` attribute. The thread pool allocates as many workers as
targets in the `config` attribute, rounded to the next ten. Each thread
monitors a single target.
"""
self.logger.info("Starting execution loop...")
with ThreadPoolExecutor(
max_workers=len(self.config) + 10 - (len(self.config) % 10)
) as executor:
for target in self.config:
executor.submit(self.monitor, target)
executor.shutdown(wait=True)
def monitor(self, target):
"""Busy monitoring loop.
Implements an infinite loop to monitor a target.
During each iteration of the run loop a target gets queried and the
result is published to the kafka topic specified in the `topic` attribute.
A busy wait loop pauses execution in 1 second intervals until the next
scheduled check time.
The nature of the busy wait loop may cause drift on the check times over
a long period of time.
Args:
target (dict): The target to monitor.
"""
while self.RUNNING:
check_time = datetime.now()
next_check = check_time + timedelta(seconds=target["frequency"])
try:
self.produce(
get(target["url"], timeout=target["frequency"] - 0.5),
target.get("regex"),
check_time,
)
except Timeout:
self.logger.warning("Check for %s timed out", target["url"])
except RequestException as e:
self.logger.error(e)
except re.error as e:
self.logger.error(e)
break
# Busy loop until next check_time
while datetime.now() < next_check:
sleep(1)
def produce(self, response, regex, ts):
"""Kafka message producer.
Prepares and publishes a message to the kafka topic specified in the
`topic` attribute.
Args:
response (requests.Response): The response object from the target check.
regex (str | None): The regular expression to look for in the response body.
ts (datetime.datetime): The timestamp of the target check.
"""
self.logger.info("Producing message...")
payload = {
"url": response.url,
"latency": response.elapsed,
"status": response.status_code,
"check_time": ts,
}
if regex:
# an re.error propagates to monitor(), which logs it and stops the loop
payload["regex_match"] = bool(re.search(regex, response.text))
try:
self.producer.produce(
self.topic,
value=json.dumps(payload, cls=JSONDatetimeEncoder),
callback=_log_produced,
)
self.producer.poll(1)
except KafkaException as e:
self.logger.error(
"An error occurred while producing a message: %s", e.args[0].reason
)
def _log_produced(err, msg):
"""Kafka producer callback.
Logs whether a message was properly produced or not.
Args:
err (str): An error message.
msg (str): The produced message.
"""
logger = logging.getLogger("ProducerCallback")
if err is not None:
logger.warning(
"Failed to deliver message at: %s. Error: %s", msg.timestamp(), err
)
else:
logger.info("Produced message at: %s", msg.timestamp())
``` |
{
"source": "jorgeromanespino/PyAspect",
"score": 2
} |
#### File: languages/aql/test_SqlAqlEngine.py
```python
import pytest
#
from aspect.sqlalchemy.languages.aql.AqlEngine import AqlEngine
from aspect.sqlalchemy.languages.aql.Translator import Translator as SqlAlchemyTranslator
from aspect.sqlalchemy.languages.aql.commands.BoolCommand import BoolCommand
#
def test_SqlAqlEngine_instantiation():
aql_engine = AqlEngine()
sqlalchemy_translator = SqlAlchemyTranslator()
#
assert aql_engine != None and sqlalchemy_translator != None
#
@pytest.fixture
def aql_engine():
engine = AqlEngine()
return engine
#
def test_aql_engine_parse_entity(aql_engine):
ast = aql_engine.parse('entity')
assert ast.getText() == 'entity' and ast.children[0].getText() == 'entity'
#
def test_aql_engine_parse_and_translate_entity(aql_engine):
ast = aql_engine.parse('entity')
translator = aql_engine.translate(ast)
assert ast.getText() == 'entity' and ast.children[0].getText() == 'entity'
assert translator != None and translator.visited == True
#
def test_aql_engine_parse_and_translate_true(aql_engine):
ast = aql_engine.parse('true')
translator = aql_engine.translate(ast)
assert ast.getText() == 'true' and ast.children[0].getText() == 'true'
assert translator != None and translator.visited == True
assert type(translator.command) == BoolCommand
```
#### File: standard/engines/test_Engine.py
```python
import pytest
import aspect
from aspect.standard.engines.Engine import Engine
import aspect.standard.operations
from aspect.core.operations.Operation import Operation as CoreOperation
from aspect.standard.operations.common.Echo import Echo as EchoOperation
@pytest.fixture
def engine():
engine = Engine()
# Two ways of importing operations
Engine.import_operations(module=aspect, submodule_path='standard/interpreters', recursive=True)
Engine.import_operations(module=aspect.standard.operations, recursive=True)
return engine
def test_engine_metadata():
assert Engine.Meta.model['StandardEngine'] == Engine
assert Engine.Meta.model['StandardEngine'].__name__ == 'Engine'
#
def test_engine_instantiation(engine):
assert engine.operation_execution_engine != None
#
def test_engine_new_instance(engine):
r = engine.new_instance("common.echo")
assert type(r) == EchoOperation
#
def test_engine_new_instance_not_exists(engine):
with pytest.raises(Exception):
engine.new_instance("echo", message="hi!")
#
def test_engine_execute_echo(engine):
r = engine.execute('common.echo', args={'message':'hello world!'})
assert r == 'echo hello world!'
#
def test_engine_execute_ping(engine):
r = engine.execute('common.ping')
assert r == 'ack'
#
def test_package_importing_by_path():
# local_namespace, local_name
operation = __import__('aspect.standard.operations.common.Ping', fromlist=['Ping'])
instance = getattr(operation, 'Ping')()
r = instance.execute()
assert r == 'ack'
#
def test_engine_execute_reverse(engine):
r = engine.execute('common.reverse', args={'message':'hello world!'})
assert r == 'hello world!'[::-1]
def test_engine_execute_exec(engine):
r = engine.execute('common.execute', args={'signature':'common.echo', 'args':{'message': 'hi'}})
assert r == 'echo hi'
``` |
{
"source": "Jorgeromeu/brainfuck-preprocessor",
"score": 3
} |
#### File: Jorgeromeu/brainfuck-preprocessor/bfpp.py
```python
import sys
import re
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("file", help="The brainfuck++ file to preprocess", type=str)
parser.add_argument("-o", "--out", help="File to output the brainfuck", type=str)
args = parser.parse_args()
inputfile = open(args.file)
def shft(n):
if n > 0:
return ''.join('>' for _ in range(n))
elif n < 0:
return ''.join('<' for _ in range(-n))
else:
return ''
def inc(n):
if n > 0:
return ''.join('+' for _ in range(n))
elif n < 0:
return ''.join('-' for _ in range(-n))
else:
return ''
def argument_validator(directive, args: list, expected: list[type]):
arg_types = list(map(lambda e: type(e), args))
if arg_types != expected:
sys.stderr.write(f'Invalid arguments: directive "{directive}" expects arguments of type {expected}. You gave: {arg_types}\n')
def stateful_if(args):
"""
conditionally evaluate the provided code
in doing this, the current cell is wiped
"""
body = str(args[0])
return f'[{body}[-]]'
def stateful_ifelse(args):
"""
conditionally evaluate the provided code
- in doing this, the current cell is wiped
- params: ifbody, elsebody, tmp_shift
"""
ifbody, elsebody, tmp = args[0], args[1], int(args[2])
code = f'{shft(tmp)}[-]+{shft(-tmp)}' # set else flag
code += f'[{ifbody}[-]{shft(tmp)}-{shft(-tmp)}]' # if block
code += f'{shft(tmp)}' # go to else flag
code += f'[{shft(-tmp)}{elsebody}-]' # return to A and execute else block
code += f'{shft(-tmp)}' # return to A
return code
def print_str(args):
string = args[0]
code = ''
for char in string:
code += inc(ord(char))
code += '.'
code += inc(-ord(char))
return code
def read_bytes(args):
n = int(args[0])
code = []
for _ in range(n):
code.append(',>')
return ''.join(code)
def move(args):
"""
move current value up to the cell "n" positions away
- pointer stays in same position
"""
n = int(args[0])
code = f'[-{shft(n)}+{shft(n)}]'
return code
def copy(args):
"""
copy current value to to the cell n positions away
- pointer stays in same position
- requires the cell at n+1 to be free
"""
n = int(args[0])
code = f'[-{shft(n)}+>+{shft(n+1)}]' # double move
code += f'{shft(n+1)}[-{shft(n+1)}+{shft(n+1)}]' # move back
code += f'{shft(n+1)}' # restore pointer
return code
def make_arr(args):
lst = args[0]
code = '>'
for val in lst:
code += f'{inc(val)}>'
code += shft(-len(lst)-1)
return code
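# Note (added): after make_arr the pointer is back on the cell just before
# the array; the n values occupy the n cells to its right.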
callbacks = {
'+': lambda args: inc(args[0]),
'-': lambda args: inc(-args[0]),
'>': lambda args: shft(args[0]),
'<': lambda args: shft(-args[0]),
'print': print_str,
'read_bytes': read_bytes,
'mv': move,
'cp': copy,
'if': stateful_if,
'ifelse': stateful_ifelse,
'make_arr': make_arr,
'arr_end_bd': lambda args: '>[>]',
'arr_beg_bd': lambda args: '<[<]',
'arr_end_in': lambda args: '[>]<',
'arr_beg_in': lambda args: '>[<]',
}
def parse_directive(directive: str):
parsed = directive[1:-1].split(' ')
identifier = parsed[0]
args = parsed[1:]
args = list(map(lambda e: eval(e), args))
return (identifier, args)
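# Examples (added): parse_directive('(+ 3)') -> ('+', [3]) and
# parse_directive('(print "hi")') -> ('print', ['hi']).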
for line in inputfile.readlines():
match = re.search(r'\(.*\)', line)
if match:
identifier, args = parse_directive(match.group())
try:
new_txt = callbacks[identifier](args)
line = re.sub(r'\(.*\)', new_txt, line)
except KeyError:
sys.stderr.write(f'bfPP: directive "{identifier}" does not exist\n')
print(line, end='')
``` |
{
"source": "jorgerpo/bitcart",
"score": 2
} |
#### File: bitcart/api/models.py
```python
import os
from bitcart import BTC
from fastapi import HTTPException
from sqlalchemy.orm import relationship
from . import settings, utils
from .db import db
# shortcuts
RPC_URL = settings.RPC_URL
RPC_USER = settings.RPC_USER
RPC_PASS = settings.RPC_PASS
Column = db.Column
Integer = db.Integer
String = db.String
Boolean = db.Boolean
Numeric = db.Numeric
DateTime = db.DateTime
Text = db.Text
ForeignKey = db.ForeignKey
class User(db.Model):
__tablename__ = "users"
id = Column(Integer, primary_key=True, index=True)
username = Column(String, index=True)
email = Column(String, index=True)
hashed_password = Column(String)
is_superuser = Column(Boolean(), default=False)
token = relationship("Token", uselist=False, back_populates="user")
class Token(db.Model):
__tablename__ = "tokens"
key = Column(String(length=40), primary_key=True)
user_id = Column(Integer, ForeignKey("users.id", ondelete="CASCADE"))
user = relationship("User", back_populates="token")
created = Column(DateTime(True))
@classmethod
async def create(cls, user: User):
user_id = user.id
key = os.urandom(20).hex()
created = utils.now()
return await super().create(user_id=user_id, key=key, created=created)
class Wallet(db.Model):
__tablename__ = "wallets"
id = Column(Integer, primary_key=True, index=True)
name = Column(String(length=1000), unique=True, index=True)
xpub = Column(String(length=1000), unique=True, index=True)
balance = Column(Numeric(16, 8))
user_id = Column(Integer, ForeignKey(User.id, ondelete="SET NULL"))
user = relationship(User, backref="wallets")
class Store(db.Model):
__tablename__ = "stores"
id = Column(Integer, primary_key=True, index=True)
name = Column(String(1000), unique=True, index=True)
domain = Column(String(1000), unique=True, index=True)
template = Column(String(1000))
email = Column(String(1000), unique=True, index=True)
wallet_id = Column(
ForeignKey(
"wallets.id", deferrable=True, initially="DEFERRED", ondelete="SET NULL"
),
index=True,
)
email_host = Column(String(1000))
email_password = Column(String(1000))
email_port = Column(Integer)
email_use_ssl = Column(Boolean)
email_user = Column(String(1000))
wallet = relationship("Wallet")
class Product(db.Model):
__tablename__ = "products"
id = Column(Integer, primary_key=True, index=True)
amount = Column(Numeric(16, 8), nullable=False)
quantity = Column(Numeric(16, 8), nullable=False)
title = Column(String(1000), nullable=False)
date = Column(DateTime(True), nullable=False)
description = Column(Text)
image = Column(String(100))
store_id = Column(
ForeignKey(
"stores.id", deferrable=True, initially="DEFERRED", ondelete="SET NULL"
),
index=True,
)
status = Column(String(1000), nullable=False)
store = relationship("Store")
class ProductxInvoice(db.Model):
__tablename__ = "productsxinvoices"
product_id = Column(Integer, ForeignKey("products.id", ondelete="SET NULL"))
invoice_id = Column(Integer, ForeignKey("invoices.id", ondelete="SET NULL"))
from gino.crud import UpdateRequest
class MyUpdateRequest(UpdateRequest):
def update(self, **kwargs):
self.products = kwargs.get("products")
if self.products:
kwargs.pop("products")
return super().update(**kwargs)
async def apply(self):
await ProductxInvoice.delete.where(
ProductxInvoice.invoice_id == self._instance.id
).gino.status()
for i in self.products:
await ProductxInvoice.create(invoice_id=self._instance.id, product_id=i)
self._instance.products = self.products
return await super().apply()
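# Note (added): this UpdateRequest override keeps the ProductxInvoice link
# table in sync; on every invoice update the old product links are deleted
# and the ones passed via update(products=...) are re-created.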
class Invoice(db.Model):
__tablename__ = "invoices"
_update_request_cls = MyUpdateRequest
id = Column(Integer, primary_key=True, index=True)
amount = Column(Numeric(16, 8), nullable=False)
status = Column(String(1000), nullable=False)
date = Column(DateTime(True), nullable=False)
bitcoin_address = Column(String(255), nullable=False)
bitcoin_url = Column(String(255), nullable=False)
products = relationship("Product", secondary=ProductxInvoice)
@classmethod
async def create(cls, **kwargs):
products = kwargs["products"]
if not products:
raise HTTPException(422, "Products list empty")
product = await Product.get(products[0])
if not product:
raise HTTPException(422, f"Product {products[0]} doesn't exist!")
store = await Store.get(product.store_id)
if not store:
raise HTTPException(422, f"Store {product.store_id} doesn't exist!")
wallet = await Wallet.get(store.wallet_id)
if not wallet:
raise HTTPException(422, "No wallet linked")
xpub = wallet.xpub
async with BTC(RPC_URL, rpc_user=RPC_USER, rpc_pass=RPC_PASS, xpub=xpub) as btc:
data_got = await btc.addrequest(
kwargs["amount"], description=product.description
)
kwargs["bitcoin_address"] = data_got["address"]
kwargs["bitcoin_url"] = data_got["URI"]
kwargs.pop("products")
return await super().create(**kwargs), xpub
```
#### File: jorgerpo/bitcart/main.py
```python
from fastapi import FastAPI
from api.db import CONNECTION_STR, db
from api.views import router
from api import settings
app = FastAPI(title="Bitcart", version="1.0")
app.include_router(router)
@app.on_event("startup")
async def startup():
await db.set_bind(CONNECTION_STR)
await db.gino.create_all()
@app.on_event("shutdown")
async def shutdown():
if settings.TEST:
await db.gino.drop_all()
await settings.btc.close()
``` |
{
"source": "JorgeRubio96/Compiladores",
"score": 4
} |
#### File: Compiladores/tarea1/Hash_Directory.py
```python
class Hash:
# Initializer: always runs when an object of the class is created;
# in this case it creates an empty array with a fixed size,
# for practical purposes a size of 5.
def __init__(self):
self.size = 5
self.map = [None] * self.size
# _get_pos is the method that finds the ideal slot for an element, based on
# a calculation that avoids duplicating or overlapping elements, so each
# slot holds one element and collisions are avoided. For that we use the
# ASCII value of each character of the key and take it modulo size (the
# size of the table/dictionary) to get a good slot.
def _get_pos(self, key):
hash = 0
for char in str(key):  # loop over the key character by character, in case it is a word
hash += ord(char)  # add the character's value
# ord(ch) is a Python builtin that returns the ASCII value of the given character
return hash % self.size  # return the ideal slot
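# Worked example (added): key "abc" -> (97 + 98 + 99) % 5 = 294 % 5 = 4.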
# add is the method that stores a value in the table under its key.
def add(self, key, value):
key_hash = self._get_pos(key)  # get the slot where the value will be inserted,
# using the method above
key_value = [key, value]  # the entry we will insert into the table,
# formed by the (key, value) pair
if self.map[key_hash] is None:  # check whether the slot/index is free
self.map[key_hash] = list([key_value])  # if it is free, insert the entry
return True  # return True to signal the value was stored
else:
for pair in self.map[key_hash]:  # if the slot is taken, iterate over its entries
if pair[0] == key:  # once we find the key
pair[1] = value  # assign the value to the pair
return True  # return True to signal the value was stored
self.map[key_hash].append(key_value)  # if the key is not there, create a new entry
return True
# delete is the method that removes entries from the dictionary.
def delete(self, key):
key_hash = self._get_pos(key)  # first get the slot for the key we want to delete
if self.map[key_hash] is None:  # check whether the slot exists
return False  # if the slot does not exist, return False
for i in range(0, len(self.map[key_hash])):  # iterate over the slot to find the entry
if self.map[key_hash][i][0] == key:  # once we find the entry in the map
self.map[key_hash].pop(i)  # remove it from the dictionary
return True
# print is the function that simply prints everything in the dictionary.
def print(self):
for item in self.map:
if item is not None:  # if the slot is not empty, print it
print(str(item))
```
#### File: Compiladores/tarea1/Stack.py
```python
class Stack:
# Initializer: always runs when an object of the class is created;
# in this case it creates an empty list.
def __init__(self):
self.items = []
# isEmpty checks whether the list (which represents our stack) is empty,
# so that based on the answer we can decide whether to add
# or remove elements.
def isEmpty(self):
return self.items == []
# push is the method that lets us add elements
# to our stack.
def push(self, item):
self.items.append(item)
# pop is the method that removes elements from our stack.
# Remember the stack logic (LIFO): we use pop because we do not care
# about the position of the last element, we only care that the last
# element disappears from the list.
def pop(self):
return self.items.pop()
# last is the method that returns the last element of the stack,
# i.e. the element at the top of the stack.
def last(self):
return self.items[len(self.items) - 1]
# size is the method that returns the size of
# our stack.
def size(self):
return len(self.items)
``` |
{
"source": "JorgeRubio96/grease-lang",
"score": 3
} |
#### File: JorgeRubio96/grease-lang/glc.py
```python
import sys
from grease.scanner import grease_lexer
from grease.parser import grease_parser, greaser
def _main():
data = ''
if len(sys.argv) > 1:
with open(sys.argv[1], 'r') as src_file:
data = src_file.read()
else:
for line in sys.stdin:
data = data + line
compile(data)
def compile(data):
result = grease_parser.parse(data,lexer=grease_lexer, debug=False, tracking=True)
greaser.write_to_file('out.gbc')
if __name__ == '__main__':
_main()
```
#### File: grease/core/function.py
```python
from grease.core.variable_table import VariableTable
from grease.core.exceptions import VariableRedefinition
from grease.core.variable import AddressingMethod
class GreaseFn:
def __init__(self, params, return_type, param_types=[], size=0, start=0):
self.params = params
self.return_type = return_type
self._variables = None
self.param_types = param_types
self.size = size
self.start = start
def open_scope(self, global_vars=None):
self._variables = VariableTable(global_vars)
for name, param in self.params.items():
self._variables.add_variable(name, param)
return self._variables
def close_scope(self):
parent = self._variables.parent
self._variables = None
return parent
class GreaseFnBuilder:
def __init__(self):
self._params = {}
self._return_type = None
self._name = None
self._struct = None
self._param_types = []
def add_param(self, param_name, param):
if param_name in self._params:
raise VariableRedefinition('{}'.format(param_name))
self._params[param_name] = param
param.method = AddressingMethod.Relative
param._address = len(self._param_types) + 2
self._param_types.append(param.type)
def add_return_type(self, return_type):
self._return_type = return_type
def add_name(self, name):
self._name = name
def add_struct(self, struct):
self._struct = struct
def build(self):
return self._name, self._struct, GreaseFn(self._params, self._return_type, self._param_types)
def reset(self):
self._params = {}
self._return_type = None
self._name = None
self._struct = None
self._param_types = []
```
#### File: grease/core/interface.py
```python
from grease.core.function_directory import FunctionDirectory
from grease.core.exceptions import FunctionRedefinition
class GreaseInterface:
def __init__(self, functions={}):
self.functions = FunctionDirectory()
for name, fn in functions.items():
success = self.functions.add_function(name, fn)
if not success:
raise FunctionRedefinition(name)
```
#### File: grease/core/type.py
```python
from enum import Enum
class GreaseTypeClass(Enum):
Int = 1
Float = 2
Char = 3
Array = 4
Struct = 5
Pointer = 6
Bool = 7
Interface = 8
class GreaseType:
def __init__(self, type_class, type_data=None, dimens=None, size=1):
self.type_class = type_class
self.type_data = type_data
self.dimens = dimens
self.size = size
def __repr__(self):
if self.type_class is GreaseTypeClass.Array:
return '[{}, {}]'.format(self.type_data, self.dimens)
elif self.type_class is GreaseTypeClass.Pointer:
return '* {}'.format(self.type_data)
elif self.type_class is GreaseTypeClass.Struct:
return 'Struct({})'.format(self.type_data)
return 'Instance {}'.format(self.type_class)
```
#### File: grease-lang/grease/scanner.py
```python
import ply.lex as lex
from grease.core.indents import Indents
reserved = {
'var': 'VAR',
'if': 'IF',
'else': 'ELSE',
'scan': 'SCAN',
'print': 'PRINT',
'and': 'AND',
'or': 'OR',
'Bool': 'BOOL',
'Int': 'INT',
'Float': 'FLOAT',
'Char': 'CHAR',
'fn': 'FN',
'interface': 'INTERFACE',
'import': 'IMPORT',
'struct':'STRUCT',
'while':'WHILE',
'alias':'ALIAS',
'as':'AS',
'gt': 'GT',
'ge': 'GE',
'lt': 'LT',
'le': 'LE',
'eq': 'EQ',
'not':'NOT',
'from': 'FROM',
'return': 'RETURN',
'true': 'TRUE',
'false': 'FALSE'
}
tokens = [
'ID', 'CONST_INT', 'CONST_REAL', 'CONST_STR', 'CONST_CHAR',
'ARROW', 'SEMICOLON', 'COLON', 'COMMA', 'DOT', 'EQUALS', 'NEW_LINE',
'OPEN_BRACK','CLOSE_BRACK', 'OPEN_PAREN', 'CLOSE_PAREN', 'PLUS', 'MINUS',
'TIMES', 'DIVIDE', 'AMP', 'INDENT', 'DEDENT'
] + list(reserved.values())
t_DOT = r'\.'
t_SEMICOLON = r'\;'
t_COLON = r'\:'
t_COMMA = r'\,'
t_OPEN_BRACK = r'\['
t_CLOSE_BRACK = r'\]'
t_EQUALS = r'\='
t_OPEN_PAREN = r'\('
t_CLOSE_PAREN = r'\)'
t_PLUS = r'\+'
t_MINUS = r'\-'
t_TIMES = r'\*'
t_DIVIDE = r'\/'
t_AMP = r'\&'
t_ARROW = r'\-\>'
t_ignore = ' '
def t_ignore_SINGLE_COMMENT(t):
r'\#.*\n'
t.lexer.lineno += 1
def t_ignore_MULTI_COMMENT(t):
r'\/\*[\s\S]*\*\/\s*'
t.lexer.lineno += t.value.count('\n')
def t_ID(t):
r'[a-zA-Z_][a-zA-Z0-9_]*'
t.type = reserved.get(t.value, 'ID')
# 'true' and 'false' are reserved as TRUE/FALSE tokens, so test those types here
if t.type in ('TRUE', 'FALSE'):
t.value = t.type == 'TRUE'
return t
def t_CONST_REAL(t):
r'[0-9]+\.[0-9]+'
t.value = float(t.value)
return t
def t_CONST_INT(t):
r'[0-9]+'
t.value = int(t.value)
return t
def t_CONST_STR(t):
r'\".+\"'
t.value = t.value[1:-1]
return t
def t_CONST_CHAR(t):
r'\'.+\''
t.value = t.value[1:-1]
return t
def t_NEW_LINE(t):
r'\n\s*[\t ]*'
t.lexer.lineno += t.value.count('\n')
t.value = len(t.value) - 1 - t.value.rfind('\n')
return t
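# Note (added): t.value now holds the indentation width of the new line;
# the Indents wrapper around the lexer presumably turns width changes into
# the INDENT/DEDENT tokens declared above.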
def first_word(s):
whites = [' ', '\t', '\n']
low = 0
for l in s:
if l in whites:
break
low += 1
return s[0:low]
def t_error(t):
print("Unexpected \"{}\" at line {}".format(first_word(t.value), t.lexer.lineno))
grease_lexer = Indents(lex.lex())
``` |
{
"source": "jorges119/localstack",
"score": 2
} |
#### File: services/redshift/redshift_starter.py
```python
from moto.redshift import responses as redshift_responses
from localstack import config
from localstack.services.infra import start_moto_server
from localstack.utils.common import recurse_object
def apply_patches():
# patch itemize() to return proper XML response tags
def itemize(data, parent_key=None, *args, **kwargs):
# TODO: potentially add additional required tags here!
list_parent_tags = ["ClusterSubnetGroups"]
def fix_keys(o, **kwargs):
if isinstance(o, dict):
for k, v in o.items():
if k in list_parent_tags:
if isinstance(v, dict) and "item" in v:
v[k[:-1]] = v.pop("item")
return o
result = itemize_orig(data, *args, **kwargs)
recurse_object(result, fix_keys)
return result
itemize_orig = redshift_responses.itemize
redshift_responses.itemize = itemize
def start_redshift(port=None, asynchronous=False):
port = port or config.PORT_REDSHIFT
apply_patches()
return start_moto_server("redshift", port, name="Redshift", asynchronous=asynchronous)
```
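For context, a standalone sketch of the `fix_keys` transformation patched in above: moto's generic `item` wrapper is renamed to the singular form of its parent tag so the XML serializer emits the expected element name. The response dict below is invented sample data.
```python
# Illustrative only: mirrors the key-fixing logic from apply_patches().
response = {"ClusterSubnetGroups": {"item": [{"Name": "group1"}]}}
list_parent_tags = ["ClusterSubnetGroups"]

def fix_keys(o):
    if isinstance(o, dict):
        for k, v in list(o.items()):
            if k in list_parent_tags and isinstance(v, dict) and "item" in v:
                v[k[:-1]] = v.pop("item")  # "ClusterSubnetGroups" -> "ClusterSubnetGroup"
    return o

print(fix_keys(response))
# {'ClusterSubnetGroups': {'ClusterSubnetGroup': [{'Name': 'group1'}]}}
```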
#### File: integration/lambdas/lambda_python3.py
```python
def handler(event, context):
# the following line is Python 3.6+ specific
msg = f"Successfully processed {event}" # noqa This code is Python 3.6+ only
return event
```
#### File: integration/lambdas/lambda_start_execution.py
```python
import json
import os
import boto3
# TODO - merge this file with lambda_send_message.py, to avoid duplication
EDGE_PORT = 4566
def handler(event, context):
protocol = "https" if os.environ.get("USE_SSL") else "http"
endpoint_url = "{}://{}:{}".format(protocol, os.environ["LOCALSTACK_HOSTNAME"], EDGE_PORT)
sf = boto3.client(
"stepfunctions",
endpoint_url=endpoint_url,
region_name=event["region_name"],
verify=False,
)
sf.start_execution(stateMachineArn=event["state_machine_arn"], input=json.dumps(event["input"]))
return 0
```
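For context, a hedged sketch of driving the handler above from a local script; the hostname, region, and state machine ARN are placeholder values and assume a LocalStack instance listening on port 4566.
```python
# Hypothetical local invocation of the handler defined above.
import os

os.environ["LOCALSTACK_HOSTNAME"] = "localhost"

event = {
    "region_name": "us-east-1",
    "state_machine_arn": "arn:aws:states:us-east-1:000000000000:stateMachine:test",
    "input": {"key": "value"},
}
# handler(event, context=None)  # uncomment with LocalStack running
```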
#### File: tests/integration/test_edge.py
```python
import io
import json
import os
import time
import unittest
import requests
from localstack import config
from localstack.services.generic_proxy import ProxyListener, start_proxy_server
from localstack.utils.aws import aws_stack
from localstack.utils.bootstrap import is_api_enabled
from localstack.utils.common import get_free_tcp_port, get_service_protocol, short_uid, to_str
class TestEdgeAPI(unittest.TestCase):
def test_invoke_apis_via_edge(self):
edge_port = config.EDGE_PORT_HTTP or config.EDGE_PORT
edge_url = "%s://localhost:%s" % (get_service_protocol(), edge_port)
if is_api_enabled("s3"):
self._invoke_s3_via_edge(edge_url)
self._invoke_s3_via_edge_multipart_form(edge_url)
if is_api_enabled("kinesis"):
self._invoke_kinesis_via_edge(edge_url)
if is_api_enabled("dynamodbstreams"):
self._invoke_dynamodbstreams_via_edge(edge_url)
if is_api_enabled("firehose"):
self._invoke_firehose_via_edge(edge_url)
if is_api_enabled("stepfunctions"):
self._invoke_stepfunctions_via_edge(edge_url)
def _invoke_kinesis_via_edge(self, edge_url):
client = aws_stack.connect_to_service("kinesis", endpoint_url=edge_url)
result = client.list_streams()
self.assertIn("StreamNames", result)
def _invoke_dynamodbstreams_via_edge(self, edge_url):
client = aws_stack.connect_to_service("dynamodbstreams", endpoint_url=edge_url)
result = client.list_streams()
self.assertIn("Streams", result)
def _invoke_firehose_via_edge(self, edge_url):
client = aws_stack.connect_to_service("firehose", endpoint_url=edge_url)
result = client.list_delivery_streams()
self.assertIn("DeliveryStreamNames", result)
def _invoke_stepfunctions_via_edge(self, edge_url):
client = aws_stack.connect_to_service("stepfunctions", endpoint_url=edge_url)
result = client.list_state_machines()
self.assertIn("stateMachines", result)
def _invoke_s3_via_edge(self, edge_url):
client = aws_stack.connect_to_service("s3", endpoint_url=edge_url)
bucket_name = "edge-%s" % short_uid()
client.create_bucket(Bucket=bucket_name)
result = client.head_bucket(Bucket=bucket_name)
self.assertEqual(200, result["ResponseMetadata"]["HTTPStatusCode"])
client.delete_bucket(Bucket=bucket_name)
bucket_name = "edge-%s" % short_uid()
object_name = "testobject"
bucket_url = "%s/%s" % (edge_url, bucket_name)
result = requests.put(bucket_url, verify=False)
self.assertEqual(200, result.status_code)
result = client.head_bucket(Bucket=bucket_name)
self.assertEqual(200, result["ResponseMetadata"]["HTTPStatusCode"])
headers = {"Content-Type": "application/x-www-form-urlencoded"}
result = requests.post(
bucket_url,
data="key=%s&file=file_content_123" % object_name,
headers=headers,
verify=False,
)
self.assertEqual(204, result.status_code)
bucket_url = "%s/example" % bucket_url
result = requests.put(bucket_url, data="hello", verify=False)
self.assertEqual(200, result.status_code)
result = io.BytesIO()
client.download_fileobj(bucket_name, object_name, result)
self.assertEqual("file_content_123", to_str(result.getvalue()))
def _invoke_s3_via_edge_multipart_form(self, edge_url):
client = aws_stack.connect_to_service("s3", endpoint_url=edge_url)
bucket_name = "edge-%s" % short_uid()
object_name = "testobject"
object_data = b"testdata"
client.create_bucket(Bucket=bucket_name)
presigned_post = client.generate_presigned_post(bucket_name, object_name)
files = {"file": object_data}
r = requests.post(
presigned_post["url"],
data=presigned_post["fields"],
files=files,
verify=False,
)
self.assertEqual(204, r.status_code)
result = io.BytesIO()
client.download_fileobj(bucket_name, object_name, result)
self.assertEqual(to_str(object_data), to_str(result.getvalue()))
client.delete_object(Bucket=bucket_name, Key=object_name)
client.delete_bucket(Bucket=bucket_name)
def test_http2_traffic(self):
port = get_free_tcp_port()
class MyListener(ProxyListener):
def forward_request(self, method, path, data, headers):
return {"method": method, "path": path, "data": data}
url = "https://localhost:%s/foo/bar" % port
listener = MyListener()
proxy = start_proxy_server(port, update_listener=listener, use_ssl=True)
time.sleep(1)
response = requests.post(url, verify=False)
self.assertEqual(
{"method": "POST", "path": "/foo/bar", "data": ""},
json.loads(to_str(response.content)),
)
proxy.stop()
def test_invoke_sns_sqs_integration_using_edge_port(self):
edge_port = config.EDGE_PORT_HTTP or config.EDGE_PORT
region_original = os.environ.get("DEFAULT_REGION")
os.environ["DEFAULT_REGION"] = "us-southeast-2"
edge_url = "%s://localhost:%s" % (get_service_protocol(), edge_port)
sns_client = aws_stack.connect_to_service("sns", endpoint_url=edge_url)
sqs_client = aws_stack.connect_to_service("sqs", endpoint_url=edge_url)
topic = sns_client.create_topic(Name="test_topic3")
topic_arn = topic["TopicArn"]
test_queue = sqs_client.create_queue(QueueName="test_queue3")
queue_url = test_queue["QueueUrl"]
sqs_client.get_queue_attributes(QueueUrl=queue_url, AttributeNames=["QueueArn"])
sns_client.subscribe(TopicArn=topic_arn, Protocol="sqs", Endpoint=queue_url)
sns_client.publish(TargetArn=topic_arn, Message="Test msg")
response = sqs_client.receive_message(
QueueUrl=queue_url,
AttributeNames=["SentTimestamp"],
MaxNumberOfMessages=1,
MessageAttributeNames=["All"],
VisibilityTimeout=2,
WaitTimeSeconds=2,
)
self.assertEqual(1, len(response["Messages"]))
os.environ.pop("DEFAULT_REGION")
if region_original is not None:
os.environ["DEFAULT_REGION"] = region_original
```
#### File: tests/integration/test_error_injection.py
```python
import unittest
import pytest
from botocore.exceptions import ClientError
from localstack import config
from localstack.utils.aws import aws_stack
from localstack.utils.common import short_uid
from .lambdas import lambda_integration
from .test_integration import PARTITION_KEY, TEST_TABLE_NAME
TEST_STREAM_NAME = lambda_integration.KINESIS_STREAM_NAME
def should_run():
return config.is_env_true("TEST_ERROR_INJECTION")
class TestErrorInjection(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
if not should_run():
pytest.skip("skipping TestErrorInjection (TEST_ERROR_INJECTION not set or false)")
def test_kinesis_error_injection(self):
kinesis = aws_stack.connect_to_service("kinesis")
aws_stack.create_kinesis_stream(TEST_STREAM_NAME)
records = [{"Data": "0", "ExplicitHashKey": "0", "PartitionKey": "0"}]
# by default, no errors
test_no_errors = kinesis.put_records(StreamName=TEST_STREAM_NAME, Records=records)
assert test_no_errors["FailedRecordCount"] == 0
# with a probability of 1, always throw errors
config.KINESIS_ERROR_PROBABILITY = 1.0
test_all_errors = kinesis.put_records(StreamName=TEST_STREAM_NAME, Records=records)
assert test_all_errors["FailedRecordCount"] == 1
# reset probability to zero
config.KINESIS_ERROR_PROBABILITY = 0.0
def get_dynamodb_table(self):
dynamodb = aws_stack.connect_to_resource("dynamodb")
# create table with stream forwarding config
aws_stack.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY)
return dynamodb.Table(TEST_TABLE_NAME)
def assert_zero_probability_read_error_injection(self, table, partition_key):
# by default, no errors
test_no_errors = table.get_item(Key={PARTITION_KEY: partition_key})
assert test_no_errors["ResponseMetadata"]["HTTPStatusCode"] == 200
def test_dynamodb_error_injection(self):
table = self.get_dynamodb_table()
partition_key = short_uid()
self.assert_zero_probability_read_error_injection(table, partition_key)
# with a probability of 1, always throw errors
config.DYNAMODB_ERROR_PROBABILITY = 1.0
with self.assertRaises(ClientError):
table.get_item(Key={PARTITION_KEY: partition_key})
# reset probability to zero
config.DYNAMODB_ERROR_PROBABILITY = 0.0
def test_dynamodb_read_error_injection(self):
table = self.get_dynamodb_table()
partition_key = short_uid()
self.assert_zero_probability_read_error_injection(table, partition_key)
# with a probability of 1, always throw errors
config.DYNAMODB_READ_ERROR_PROBABILITY = 1.0
with self.assertRaises(ClientError):
table.get_item(Key={PARTITION_KEY: partition_key})
# reset probability to zero
config.DYNAMODB_READ_ERROR_PROBABILITY = 0.0
def test_dynamodb_write_error_injection(self):
table = self.get_dynamodb_table()
# by default, no errors
test_no_errors = table.put_item(Item={PARTITION_KEY: short_uid(), "data": "foobar123"})
self.assertEqual(200, test_no_errors["ResponseMetadata"]["HTTPStatusCode"])
# with a probability of 1, always throw errors
config.DYNAMODB_WRITE_ERROR_PROBABILITY = 1.0
with self.assertRaises(ClientError):
table.put_item(Item={PARTITION_KEY: short_uid(), "data": "foobar123"})
# BatchWriteItem throws ProvisionedThroughputExceededException if ALL items in Batch are Throttled
with self.assertRaises(ClientError):
table.batch_write_item(
RequestItems={
table: [
{
"PutRequest": {
"Item": {
PARTITION_KEY: short_uid(),
"data": "foobar123",
}
}
}
]
}
)
# reset probability to zero
config.DYNAMODB_WRITE_ERROR_PROBABILITY = 0.0
```
#### File: tests/integration/test_kms.py
```python
import unittest
from localstack import config
from localstack.constants import TEST_AWS_ACCOUNT_ID
from localstack.utils.aws import aws_stack
class KMSTest(unittest.TestCase):
def test_create_key(self):
client = aws_stack.connect_to_service("kms")
response = client.list_keys()
self.assertEqual(200, response["ResponseMetadata"]["HTTPStatusCode"])
keys_before = response["Keys"]
response = client.create_key(
Policy="policy1", Description="test key 123", KeyUsage="ENCRYPT_DECRYPT"
)
self.assertEqual(200, response["ResponseMetadata"]["HTTPStatusCode"])
key_id = response["KeyMetadata"]["KeyId"]
response = client.list_keys()
self.assertEqual(len(keys_before) + 1, len(response["Keys"]))
response = client.describe_key(KeyId=key_id)["KeyMetadata"]
self.assertEqual(key_id, response["KeyId"])
self.assertIn(":%s:" % config.DEFAULT_REGION, response["Arn"])
self.assertIn(":%s:" % TEST_AWS_ACCOUNT_ID, response["Arn"])
```
#### File: tests/unit/test_logs.py
```python
import unittest
from localstack.services.logs.logs_listener import log_events_match_filter_pattern
class CloudWatchLogsTest(unittest.TestCase):
def test_filter_expressions(self):
def assert_match(pattern, log_events, expected):
result = log_events_match_filter_pattern(pattern, log_events)
            if expected:
                self.assertTrue(result)
            else:
                self.assertFalse(result)
log_events = [{"message": "test123"}, {"message": "foo bar 456"}]
assert_match("*", log_events, True)
assert_match("", log_events, True)
assert_match("INVALID", log_events, False)
```
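The helper under test can also be exercised directly; this sketch simply restates the assertions above as standalone calls.
```python
# Direct check of the filter-pattern helper; data mirrors the unit test.
from localstack.services.logs.logs_listener import log_events_match_filter_pattern

events = [{"message": "test123"}, {"message": "foo bar 456"}]
assert log_events_match_filter_pattern("*", events)       # wildcard matches
assert log_events_match_filter_pattern("", events)        # empty pattern matches
assert not log_events_match_filter_pattern("INVALID", events)
```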
#### File: tests/unit/test_tagging.py
```python
import unittest
from localstack.utils.tagging import TaggingService
class TestTaggingService(unittest.TestCase):
svc = TaggingService()
def test_list_empty(self):
result = self.svc.list_tags_for_resource("test")
self.assertEqual({"Tags": []}, result)
def test_create_tag(self):
tags = [{"Key": "key_key", "Value": "value_value"}]
self.svc.tag_resource("arn", tags)
actual = self.svc.list_tags_for_resource("arn")
expected = {"Tags": [{"Key": "key_key", "Value": "value_value"}]}
self.assertDictEqual(expected, actual)
def test_delete_tag(self):
tags = [{"Key": "key_key", "Value": "value_value"}]
self.svc.tag_resource("arn", tags)
self.svc.untag_resource("arn", ["key_key"])
result = self.svc.list_tags_for_resource("arn")
self.assertEqual({"Tags": []}, result)
def test_list_empty_delete(self):
self.svc.untag_resource("arn", ["key_key"])
result = self.svc.list_tags_for_resource("arn")
self.assertEqual({"Tags": []}, result)
``` |
{
"source": "jorgesalhani/LearningFastAPI",
"score": 2
} |
#### File: blog/routers/blog.py
```python
from fastapi import APIRouter
from fastapi.params import Depends
from sqlalchemy.orm.session import Session
from starlette import status
from .. import schemas, database, oauth2
from typing import List
from ..repository import blog
router = APIRouter(
prefix='/blog',
tags=['Blogs']
)
get_db = database.get_db
@router.get('/', response_model=List[schemas.ShowBlog])
def all(
db: Session = Depends(get_db),
get_current_user: schemas.User = Depends(oauth2.get_current_user)
):
return blog.get_all(db=db)
@router.post('/', status_code=status.HTTP_201_CREATED)
def create(
request: schemas.Blog,
db: Session = Depends(get_db),
get_current_user: schemas.User = Depends(oauth2.get_current_user)
):
return blog.create(request=request, db=db)
@router.delete('/{id}', status_code=status.HTTP_202_ACCEPTED)
def destroy(
id,
db: Session = Depends(get_db),
get_current_user: schemas.User = Depends(oauth2.get_current_user)
):
return blog.destroy(id=id, db=db)
@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)
def update(
id,
request: schemas.Blog,
db: Session = Depends(get_db),
get_current_user: schemas.User = Depends(oauth2.get_current_user)
):
return blog.update(id=id, request=request, db=db)
@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas.ShowBlog)
def show(
id,
db: Session = Depends(get_db),
get_current_user: schemas.User = Depends(oauth2.get_current_user)
):
return blog.show(id=id, db=db)
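# A minimal usage sketch, assuming the application object lives in
# blog.main (hypothetical module path) and includes this router:
#
#   from fastapi.testclient import TestClient
#   from blog.main import app
#
#   client = TestClient(app)
#   assert client.get('/blog/').status_code == 401  # rejected without a token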
``` |
{
"source": "Jorge-Salmon/DoubleHelix",
"score": 2
} |
#### File: Jorge-Salmon/DoubleHelix/engine_ui.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(801, 610)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget.setGeometry(QtCore.QRect(10, 0, 781, 571))
self.tabWidget.setObjectName("tabWidget")
self.tab_1 = QtWidgets.QWidget()
self.tab_1.setObjectName("tab_1")
self.pushButton_info = QtWidgets.QPushButton(self.tab_1)
self.pushButton_info.setGeometry(QtCore.QRect(10, 320, 101, 23))
self.pushButton_info.setObjectName("pushButton_info")
self.text_input = QtWidgets.QTextEdit(self.tab_1)
self.text_input.setGeometry(QtCore.QRect(130, 20, 621, 241))
self.text_input.setObjectName("text_input")
self.DNA_Button = QtWidgets.QRadioButton(self.tab_1)
self.DNA_Button.setGeometry(QtCore.QRect(10, 40, 51, 17))
self.DNA_Button.setObjectName("DNA_Button")
self.RNA_button = QtWidgets.QRadioButton(self.tab_1)
self.RNA_button.setGeometry(QtCore.QRect(70, 40, 41, 17))
self.RNA_button.setObjectName("RNA_button")
self.label = QtWidgets.QLabel(self.tab_1)
self.label.setGeometry(QtCore.QRect(10, 70, 47, 13))
self.label.setObjectName("label")
self.pushButton_load = QtWidgets.QPushButton(self.tab_1)
self.pushButton_load.setGeometry(QtCore.QRect(10, 240, 101, 23))
self.pushButton_load.setObjectName("pushButton_load")
self.pushButton_randSeq = QtWidgets.QPushButton(self.tab_1)
self.pushButton_randSeq.setGeometry(QtCore.QRect(10, 170, 101, 23))
self.pushButton_randSeq.setObjectName("pushButton_randSeq")
self.label_length = QtWidgets.QLabel(self.tab_1)
self.label_length.setGeometry(QtCore.QRect(10, 120, 47, 13))
self.label_length.setObjectName("label_length")
self.textEdit_label = QtWidgets.QLineEdit(self.tab_1)
self.textEdit_label.setGeometry(QtCore.QRect(10, 90, 101, 20))
self.textEdit_label.setObjectName("textEdit_label")
self.textEdit_length = QtWidgets.QLineEdit(self.tab_1)
self.textEdit_length.setGeometry(QtCore.QRect(10, 140, 101, 20))
self.textEdit_length.setObjectName("textEdit_length")
self.textEdit_console = QtWidgets.QTextEdit(self.tab_1)
self.textEdit_console.setGeometry(QtCore.QRect(130, 320, 621, 201))
self.textEdit_console.setObjectName("textEdit_console")
self.label_seq_info = QtWidgets.QLabel(self.tab_1)
self.label_seq_info.setGeometry(QtCore.QRect(130, 300, 41, 16))
self.label_seq_info.setObjectName("label_seq_info")
self.pushButton_save = QtWidgets.QPushButton(self.tab_1)
self.pushButton_save.setGeometry(QtCore.QRect(10, 500, 101, 23))
self.pushButton_save.setObjectName("pushButton_save")
self.pushButton_transcript = QtWidgets.QPushButton(self.tab_1)
self.pushButton_transcript.setGeometry(QtCore.QRect(10, 350, 101, 23))
self.pushButton_transcript.setObjectName("pushButton_transcript")
self.pushButton_translate = QtWidgets.QPushButton(self.tab_1)
self.pushButton_translate.setGeometry(QtCore.QRect(10, 380, 101, 23))
self.pushButton_translate.setObjectName("pushButton_translate")
self.pushButton_readingFrames = QtWidgets.QPushButton(self.tab_1)
self.pushButton_readingFrames.setGeometry(QtCore.QRect(10, 410, 101, 23))
self.pushButton_readingFrames.setObjectName("pushButton_readingFrames")
self.pushButton_proteinsORF = QtWidgets.QPushButton(self.tab_1)
self.pushButton_proteinsORF.setGeometry(QtCore.QRect(10, 440, 101, 23))
self.pushButton_proteinsORF.setObjectName("pushButton_proteinsORF")
self.label_2 = QtWidgets.QLabel(self.tab_1)
self.label_2.setGeometry(QtCore.QRect(10, 220, 91, 16))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.tab_1)
self.label_3.setGeometry(QtCore.QRect(10, 20, 91, 16))
self.label_3.setObjectName("label_3")
self.tabWidget.addTab(self.tab_1, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.tabWidget.addTab(self.tab_2, "")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 801, 21))
self.menubar.setObjectName("menubar")
self.menuwora = QtWidgets.QMenu(self.menubar)
self.menuwora.setObjectName("menuwora")
self.menuTools = QtWidgets.QMenu(self.menubar)
self.menuTools.setObjectName("menuTools")
self.menuAbout = QtWidgets.QMenu(self.menubar)
self.menuAbout.setObjectName("menuAbout")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.menubar.addAction(self.menuwora.menuAction())
self.menubar.addAction(self.menuTools.menuAction())
self.menubar.addAction(self.menuAbout.menuAction())
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.pushButton_info.setText(_translate("MainWindow", "Info"))
self.text_input.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.DNA_Button.setText(_translate("MainWindow", "DNA"))
self.RNA_button.setText(_translate("MainWindow", "RNA"))
self.label.setText(_translate("MainWindow", "Label"))
self.pushButton_load.setText(_translate("MainWindow", "Load file"))
self.pushButton_randSeq.setText(_translate("MainWindow", "Randomize"))
self.label_length.setText(_translate("MainWindow", "Length"))
self.label_seq_info.setText(_translate("MainWindow", "Output:"))
self.pushButton_save.setText(_translate("MainWindow", "Save"))
self.pushButton_transcript.setText(_translate("MainWindow", "Transcript"))
self.pushButton_translate.setText(_translate("MainWindow", "Translate"))
self.pushButton_readingFrames.setText(_translate("MainWindow", "Reading Frames"))
self.pushButton_proteinsORF.setText(_translate("MainWindow", "Proteins ORFs"))
self.label_2.setText(_translate("MainWindow", "Or load a file:"))
self.label_3.setText(_translate("MainWindow", "Generate random:"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_1), _translate("MainWindow", "Start"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("MainWindow", "Advanced"))
self.menuwora.setTitle(_translate("MainWindow", "File"))
self.menuTools.setTitle(_translate("MainWindow", "Tools"))
self.menuAbout.setTitle(_translate("MainWindow", "About"))
``` |
{
"source": "jorgesaw/imperial",
"score": 2
} |
#### File: core/factoria/factoriaVentanas.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.factoria.mostrarVentanaGui import MostrarVentanaGui
from imperial.vista import factoria
class FactoriaVentanas(object):
u"""Fábrica para crear las distintas instancias de cada ventana de la aplicación."""
@staticmethod
def crearVentanaGui(tipo, parent=None, mapParam={}):
ventana = None
if tipo in factoria.LST_GENERIC_WINDOW:
ventana = MostrarVentanaGui(tipo, parent, mapParam)
return ventana
@staticmethod
def __setearParametros(tipo, mapParam):
pass
#mapParam['clase_modelo'] = dto.getClaseModelo(tipo)
#mapParam['dao'] = dao.getClaseModelo(tipo)
#mapParam['modelo'] = Model
#mapParam['modelo_tabla'] = ModeloTabla
#mapParam['modelo_datos_tabla'] = dao.getClaseModelo(tipo)
#mapParam['ventana'] = dlg.getClaseModelo(tipo)
```
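A hedged usage sketch of the factory; `TIPO_VENTANA` is a placeholder for one of the real type constants in `factoria.LST_GENERIC_WINDOW`.
```python
# Illustrative only: build and show a generic CRUD window via the factory.
#   ventana_gui = FactoriaVentanas.crearVentanaGui(TIPO_VENTANA, parent=None, mapParam={})
#   if ventana_gui is not None:
#       ventana = ventana_gui.prepararVentana()
#       ventana.show()
```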
#### File: core/factoria/mostrarVentanaGui.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.vista.factoria.ventanasGui import VentanasGui
from imperial.vista import factoria
class MostrarVentanaGui(VentanasGui):
u"""Clase factoría para crear gui de CRUD genérico."""
def __init__(self, tipo, parent=None, mapParam=None):
super(MostrarVentanaGui, self).__init__(parent, mapParam)
self.tipo = tipo
def prepararVentana(self):
dicDatos = factoria.getDicConfigClases(self.tipo)
claseModelo = dicDatos.get('clase_modelo')
dao = dicDatos.get('dao')(False)
modelo = dicDatos.get('modelo')(dao, claseModelo)
modeloDatosTabla = None
mt = dicDatos.get('modelo_tabla')
if mt:
mdt = dicDatos.get('modelo_datos_tabla')
if mdt:
modeloDatosTabla = mdt()
modeloTabla = mt(modelo, modeloDatosTabla)
else:
modeloTabla = mt(modelo)
else:
modeloTabla = modelo
ventana = dicDatos.get('ventana')(modeloTabla,
self._parent)
ventana.move(0, 0)
return ventana
def tipoDlg(self):
raise "Método sin implementar."
```
#### File: core/factoria/mostrarVentanaSearchGui.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.vista.factoria.BusquedaGui import BusquedaGui
class MostrarVentanaSearchGui(BusquedaGui):
u"""Clase factoría para crear gui de búsqueda genérico."""
def __init__(self, tipo, parent=None, mapParam=None):
super(MostrarVentanaSearchGui, self).__init__(parent, mapParam)
self.tipo = tipo
self.setDatoBuscado(self.prepararVentana())
def prepararVentana(self):
from imperial.vista import factoria
dicDatos = factoria.getDicConfigClasesSearch(self.tipo)
claseModelo = dicDatos.get('clase_modelo')
dao = dicDatos.get('dao')(False)
modelo = dicDatos.get('modelo')(dao, claseModelo)
modeloDatosTabla = None
mt = dicDatos.get('modelo_tabla')
if mt:
mdt = dicDatos.get('modelo_datos_tabla')
if mdt:
modeloDatosTabla = mdt()
modeloTabla = mt(modelo, modeloDatosTabla)
else:
modeloTabla = mt(modelo)
else:
modeloTabla = modelo
busqDlg = dicDatos.get('ventana')(modeloTabla,
self.parent)
ok = True
if self.mapParam:
ok = not self.mapParam.get('carga_previa')
if self.mapParam.get('texto'):
busqDlg.busquedaLineEdit.setText(self.mapParam.get('texto'))
busqDlg.on_busquedaLineEdit_textEdited(self.mapParam.get('texto'))
ok = busqDlg.buscarDato()
if ok:
r = busqDlg.exec_()
            if r:  # The user selected a row.
return modeloTabla.filaDato(modeloTabla.filaSeleccionada)
return None
return None
def tipoDlg(self):
raise "Método sin implementar."
```
#### File: core/gui/filterReturn.py
```python
import sys
sys.path.append('./')
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import PyQt4
import PyQt4.QtCore as QtCore
import PyQt4.QtGui as QtGui
class FilterReturn(QtCore.QObject):
def eventFilter(self, source, event):
if (event.type()==QEvent.KeyPress):
key = event.key()
if key==Qt.Key_Return or key==Qt.Key_Enter:
source.emit(SIGNAL("enterPressed()"))
        # eventFilter belongs to QObject; FilterReturn is a plain QObject, so
        # dispatch through the correct base class instead of QWidget.
        return QtCore.QObject.eventFilter(self, source, event)
class FilterESC(QtCore.QObject):
def eventFilter(self, source, event):
if (event.type()==QEvent.KeyPress):
key = event.key()
#print('KEY:', key)
#print('ESC:', Qt.Key_Escape)
if key==Qt.Key_Escape:
source.emit(SIGNAL("ESCPressed()"))
        return QtCore.QObject.eventFilter(self, source, event)  # QObject, not QWidget (see FilterReturn above)
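# Usage sketch (ResumenFiscalDlg later in this file set shows the real pattern):
#
#   filter_return = FilterReturn()
#   some_widget.installEventFilter(filter_return)
#   parent.connect(some_widget, SIGNAL("enterPressed()"), on_enter)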
```
#### File: core/gui/MsgBox.py
```python
from __future__ import absolute_import, print_function, unicode_literals
import PyQt4.QtGui as _qg
class MsgBox(object):
@staticmethod
def okToContinue(parent=None, title=u"Mate TPV - Salir",
msg=u"¿Desea salir de la aplicación?"):
if parent:
reply = _qg.QMessageBox.question(parent,
parent.tr(title),
parent.tr(msg),
parent.tr(u'Sí'),
parent.tr(u'No'),
parent.tr(u'Cancelar')
)
else:
reply = _qg.QMessageBox.question(parent,
title, msg, u'Sí', u'No', u'Cancelar')
        if reply != 0:  # The user did not choose "Sí" (option 0).
return False
return True
@staticmethod
def ok_NoToContinue(parent=None, title=None, msg=None):
if parent:
reply = _qg.QMessageBox.question(parent,
parent.tr(title),
parent.tr(msg),
parent.tr(u'Sí'),
parent.tr(u'No'),
)
else:
reply = _qg.QMessageBox.question(parent,
title, msg, u'Sí', u'No')
        if reply != 0:  # The user did not choose "Sí" (option 0).
return False
return True
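# Usage sketch (illustrative): a confirmation prompt before a destructive
# action. Requires a running QApplication; the strings are placeholders.
#
#   if MsgBox.ok_NoToContinue(parent, u"Eliminar", u"¿Eliminar el dato seleccionado?"):
#       pass  # proceed with the deletion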
```
#### File: core/util/rc_style.py
```python
from __future__ import absolute_import, print_function, unicode_literals
import logging
logger = logging.getLogger()
def cargarStyleSheet(f):
    """Load a stylesheet file if it exists; otherwise return an empty string."""
    style = None
    if f:
        try:
            with open(f, 'r') as fh:
                style = fh.read()
        except Exception:
            logger.info('Could not load the stylesheet file.', exc_info=True)
        finally:
            return style if style is not None else ''
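# Usage sketch: apply a stylesheet to the running application
# ('user.qss' is an illustrative path; a missing file yields '').
#
#   app.setStyleSheet(cargarStyleSheet('user.qss'))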
```
#### File: vista/crud/CRUD.py
```python
from PyQt4.QtCore import Qt, pyqtSignature
from PyQt4.QtGui import QDialog, QMessageBox, QIcon
from imperial.core.factoria.factoriaVentanasSearch import FactoriaVentanasSearch
from imperial.core.gui.MsgBox import MsgBox
from imperial.core.gui.filterReturn import FilterReturn, FilterESC
import PyQt4
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
import sys
import ui_dlgCrud
sys.path.append('./')
#from core.vista.factoria.FactoriaBusqueda import FactoriaBusqueda
u"""Módulo que contiene la gui para interactuar con un CRUD de mascotas.
:author: <NAME>
:version: 1.0.0"""
__docformat__ = "reestructuredtext"
__version__ = "1.0.0"
MAC = hasattr(PyQt4.QtGui, "qt_mac_set_native_menubar")
class CRUD(QDialog,
ui_dlgCrud.Ui_DlgCrud):
u"""Dialogo CRUD para CRUD de masoctas."""
def __init__(self, modelo, parent=None):
u"""Inicializador de la clase DAO.
:param modelo: Modelo que maneja los datos de un tipo objeto"""
super(CRUD, self).__init__(parent)
# Crea SIGNAL-SLOTS para conectar widgets del formulario con métodos de nuestra subclase.
self.setupUi(self)
self.searchPushButton.setText('')
self.searchPushButton.setIcon(QIcon(":/find.png"))
self.modelo = modelo
self.datosTableView.setModel(self.modelo)
#self.datosTableView.setAlternatingRowColors(True)
if not MAC:
self.listPushButton.setFocusPolicy(Qt.NoFocus)
self.newPushButton.setFocusPolicy(Qt.NoFocus)
self.delPushButton.setFocusPolicy(Qt.NoFocus)
self.editPushButton.setFocusPolicy(Qt.NoFocus)
self.quitPushButton.setFocusPolicy(Qt.NoFocus)
self.on_buscarLineEdit_textEdited('')
def keyPressEvent(self, e):
if e.key() == Qt.Key_Escape:
e.ignore()
else:
QDialog.keyPressEvent(self, e)
#e.accept()
@pyqtSignature("")
def on_delPushButton_clicked(self):
self.delData()
self.updateUi()
@PyQt4.QtCore.pyqtSlot()
def on_listPushButton_clicked(self):
self.listData()
@pyqtSignature("")
def searchData(self, tipoBusqueda):
busqueda = FactoriaVentanasSearch.crearVentanaGui(tipoBusqueda, self,
{'texto': unicode(self.buscarLineEdit.text())})
dato = busqueda.datoBuscado()
if dato:
self.modelo.clear()
self.modelo.insertRowsTabla(0, [dato,])
self.updateUi()
self.buscarLineEdit.clear()
self.on_buscarLineEdit_textEdited('')
@pyqtSignature("QString")
def on_buscarLineEdit_textEdited(self, text):
enable = not self.buscarLineEdit.text().isEmpty()
self.searchPushButton.setEnabled(enable)
@pyqtSignature("")
def on_quitPushButton_clicked(self):
self.parentWidget().close()
QDialog.accept(self)
def closeEvent(self, event):
self.parentWidget().close()
def addData(self, addDlg):
r = addDlg.exec_()
        if r:  # "Add" was pressed.
row = self.modelo.rowCount()
ok, msg = self.modelo.insertRows(row, addDlg.data())
if ok:
QMessageBox.information(self, "Agregar datos", msg)
self.updateUi()
else:
QMessageBox.information(self, "Agregar datos", msg)
def delData(self):
index = self.datosTableView.currentIndex()
if not index.isValid():
QMessageBox.information(self, "Eliminar Datos",
u"Por favor seleccione una fila de la tabla para eliminar.")
return
row = index.row()
if not MsgBox.ok_NoToContinue(self, "Remove Datos",
u"¿Desea eliminar el dato seleccionado de \n" +
"la base de datos?\n"):
return
ok, msg = self.modelo.removeRows(row)
QMessageBox.information(self, "Eliminar datos", msg)
def editData(self, editDlg):
index = self.datosTableView.currentIndex()
if not index.isValid():
QMessageBox.information(self, "Editar Datos",
u"Por favor seleccione una fila de la tabla para editar.")
return
row = index.row()
lstData = self.modelo.datosModelo(row)
editDlg.setData(lstData)
editDlg.updateUi()
r = editDlg.exec_()
if r:
ok, msg = self.modelo.editRows(index, editDlg.data())
QMessageBox.information(self, "Editar Datos", msg)
def listData(self):
#self.modelo.listData()
ok, msg = self.modelo.listData(reverse=False)
if ok:
self.modelo.updateTable()
else:
QMessageBox.information(self, "Listar Datos",
msg)
self.updateUi()
def resizeColumns(self, columns):
for column in columns:
self.datosTableView.resizeColumnToContents(column)
def updateUi(self):
enable = self.modelo.isDatos()
for btn in (self.delPushButton, self.editPushButton):
btn.setEnabled(enable)
#self.resizeColumns(self.modelo.modeloDatosTabla.lstHeader)
```
#### File: imperial/dao/DAOProducto.py
```python
from __future__ import absolute_import, unicode_literals, print_function
from imperial.core.dao.DAOAlchemy import DAOAlchemy
from imperial.model.models import Product, Category, PrecioProd
from sqlalchemy.sql.expression import asc
from sqlalchemy.sql.functions import func
from sqlalchemy.sql.operators import collate
from sqlalchemy.orm import lazyload, noload
import sqlalchemy
class DAOProducto(DAOAlchemy):
def __init__(self, cerrarSesion=True):
super(DAOProducto, self).__init__(cerrarSesion)
def delete(self, object):
"""Elimina los datos de un modelo, pero en realidad lo que hace es
modificar la variable activo de True a False.
:param producto: Representa una mascota.
:return: Un entero que representa el id del modelo eliminado, si se produce
un error devuleve -1, si la mascota no existe devuelve 0.
:rtype: int"""
object.activo = False
id = self.update(object)
return id
def getAllByStockIdealMin(self):
"""Busca todos los productos cuyo stock sea menor que el stock ideal.
:return: Devuelve una tupla con objetos Product.
:rtype: tuple[tipo]"""
objects = []
try:
pass
#objects = self.session.query(Product).filter(\
# Product.stock < Product.ideal_stock).\
# order_by(Product.stock).all()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
def getPoductByCode(self, code, tipo):
u"""Busca un objeto en la base de datos con el código ingresado.
:param code: Representa el identificador del objeto.
:param tipo: No es necesario porque el tipo es Product. Se mantiene
por cuestiones de compatibilidad con el código ya existente.
:return: Devuelve un producto con el parámetro code, sino devuelve None."""
from imperial.model.models import Product, Category
prod = None
try:
pass
#prod = self.session.query(Product).filter(Product.code == code).one()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return prod
def datosByCol(self, mapParam):
objects = []
try:
objects = self.session.query(Product).filter(\
Product.name == mapParam['COL']).\
order_by(asc(collate(Product.name, 'NOCASE'))).all()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
def getListaDatos(self, mapParam):
"""Busca todos los productos según las iniciales del nombre pasado como parámetro.
No case sensitive.
:return: Devuelve una tupla con todos los objetos Producto.
:rtype: tuple[Product]"""
objects = []
try:
objects = self.session.query(Product).filter(Product.name.like(
mapParam['DATO_BUSQUEDA'] + '%')). \
order_by(Product.name).all()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
objects = None
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
def getProductosByCategoria(self, mapParam):
"""Busca todos los productos según las iniciales del nombre pasado como parámetro.
No case sensitive.
:return: Devuelve una tupla con todos los objetos Producto.
:rtype: tuple[Product]"""
objects = []
try:
if mapParam.get('activo'):
objects = self.session.query(Product).\
filter(Product.categoria == mapParam['categoria'],
Product.activo==True).\
order_by(asc(collate(Product.name, 'NOCASE'))).all()
else:
objects = self.session.query(Product).\
filter(Product.categoria == mapParam['categoria']).\
order_by(asc(collate(Product.name, 'NOCASE'))).all()
#order_by(asc(func.lower(Product.name))).all()
#join(Category).filter(Category.id == mapParam['categoria']).\
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
objects = None
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
def getAll(self, tipo):
"""Busca todos los objetos de un tipo guardados en la base de datos.
:return: Devuelve una tupla con todos los objetos.
:rtype: tuple[tipo]"""
objects = []
try:
objects = self.session.query(tipo).filter(Product.activo==True)\
.order_by(asc(collate(Product.name, 'NOCASE'))).all()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
def getAllLazy(self):
"""Busca todos los objetos de un tipo guardados en la base de datos.
:return: Devuelve una tupla con todos los objetos.
:rtype: tuple[tipo]"""
objects = []
try:
objects = self.session.query(Product)\
.options(noload(Product.colPrecioProd))\
.order_by(asc(collate(Product.name, 'NOCASE'))).all()
except sqlalchemy.exc.DBAPIError, e:
if self.session is not None:
self.session.rollback()
print("Error!", e)
finally:
if self._DAOAlchemy__cerrarSesion:
self.session.close()
return objects
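# Usage sketch (illustrative): fetch the active products of a category.
#
#   dao = DAOProducto(cerrarSesion=False)
#   panes = dao.getProductosByCategoria({'categoria': Category.PAN, 'activo': True})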
```
#### File: imperial/imperial/__init__.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.dao.DAOAlchemy import DAOAlchemy
from imperial.core.util import rc_style
import PyQt4
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
import ctypes
import logging
import logging.config
import imperial.model.mapper.mapeador
import imperial.qrc_main_window
import os
import sys
from imperial.vista.main.MainWindowImperial import MainWindowImperial
#myappid = 'jorgesaw.Mate_TPV.Mate_TPV.0.6'
#ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
__version__ = '1.0.0'
logging.config.fileConfig('logging.conf')
logger = logging.getLogger(__name__)
class App(_qg.QApplication):
def __init__(self):
super(App, self).__init__(sys.argv)
self.setOrganizationName("Uno.x Inc.")
self.setOrganizationDomain("unox.com.ar")
self.setApplicationName(u"La Imperial Sistema")
self.setWindowIcon(_qg.QIcon(":/favicon.png"))
#self.codec = _qc.QTextCodec.codecForName("UTF-8".encode("ascii", "ignore"))
#_qc.QTextCodec.setCodecForTr(self.codec)
#_qc.QTextCodec.setCodecForCStrings(self.codec)
#_qc.QTextCodec.setCodecForLocale(self.codec)
#self.translators = []
#VERRRRRRRRRRRRRRRR
#loads all the translations
#locale = _qc.QLocale.system().name()
#for tr_name in ( locale, 'qt_'+locale):
# if not self.loadTranslation(tr_name):
#new_tr_name = tr_name.rsplit('_', 1)[0]#If en_GB doesnt load, try to load en
#self.loadTranslation(new_tr_name)
# pass
#print(QtCore.QTextCodec.codecForCStrings().name())
#test the translation
#dont use unicode
logger.info(self.tr("Loading ...".encode('utf-8')))
#I've decided that is better to have consistence than pain
#_qc.QLocale.setDefault(_qc.QLocale.c())
#sets a style (which is not the same as stylesheet)
#the default style (windows) doesn't allow to use transparent background on widgets
#self.setStyle("plastique")
#Some built-in stylesheets
#Some built-in stylesheets
self.style = 0
#self.styles = ( ":/data/darkorange.qss", ":/data/style.qss",":/data/levelfour.qss", 'user.qss')
self.styles = {'Por defecto': None,
"darkorange": ":/data/darkorange.qss", }
#sets the first stylesheet
#self.changeStyle()
self.window = MainWindowImperial(self)
def run(self):
#Enters the main loop
self.window.showMaximized()
return self.exec_()
    def changeStyle(self):
        if self.sender():
            if self.sender().data():
                self.style = int(self.sender().data())  # fixed typo: was `self.syle`
        style = rc_style.cargarStyleSheet(self.styles.values()[self.style])
        self.setStyleSheet(style)
def loadTranslation(self, name, path=':/data/trans/'):
"""Loads a translation file, it must be a ".qm" file
by default it loads it from resource file
it only loads ONE translation
"""
        return  # Translations are currently disabled; the code below never runs.
logger.info("Loading translation '%s' in '%s'"%(name, path))
trans = _qc.QTranslator()
if trans.load(name, path):
self.installTranslator(trans)
self.translators.append(trans)
logger.info("Translation loaded ok")
return True
else:
logger.error("Couldn't load translator %s"%name)
return False
def showWindow(self, wnd):
if len(self.window.mdiArea.subWindowList()) > 0:
self.window.mdiArea.closeAllSubWindows()
self.window.mdiArea.addSubWindow(wnd)
wnd.show()
def isWindowActived(self, tipo):
u""""""
for wnd in self.window.mdiArea.subWindowList():
if isinstance(wnd, tipo):
return True
return False
def runApp():
return App().run()
def run():
return runApp()
if __name__=='__main__':
sys.exit(run())
```
#### File: imperial/model/ModeloVentaDiaria.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.model.datamodel.dataModel import DataModel
from imperial.dao.DAOProducto import DAOProducto
from imperial.model.models import VentaProd, Category, Venta
from imperial.model.modeltable.modeldatatable.tmVentaDiariaVariedades import \
TMVentaDiariaVariedades
from imperial.util import variables
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
import datetime
class ModeloVentaDiaria(object):
u"""Modelo que controla los dos modelos de la venta diaria."""
def __init__(self, modelo=None):
self.modelo = modelo
self.modeloVarios = TMVentaDiariaVariedades(modelo)
self.modeloVarios.modeloPadre = self
self.modeloVarios.categoria = Category.VARIOS
self.modeloPan = TMVentaDiariaVariedades(modelo)
self.modeloPan.modeloPadre = self
self.modeloPan.categoria = Category.PAN
self.lstModelosHijos = (self.modeloVarios, self.modeloPan)
self.ventana = None
self.datosNuevos = True
#self.mov_venta_prod = None
self.venta = None
def buscarDatos(self, lstDatos):
fecha = lstDatos[0]
vendedor = lstDatos[1]
mapParam = {'FECHA_MOV': fecha, 'VENDEDOR': vendedor}
self.buscarDatosByFechaVendedor(mapParam)
#self.datos = []
self.modeloVarios.datos = []
self.modeloPan.datos = []
if self.venta:
#Filtrar datos por categoría.
#listasDatos = self.datosPorCategoria(
# self.mov_venta_prod.colVentasProd)
#listasDatos = self.datosPorCategoria(self.venta.colVentasProd)
#listasDatos = self.venta.colVentasProd
#listasDatos = self.datosPorCategoria(self.modelo.dao.ventaProdByVenta(
# {'ID_VENTA': self.venta.id}))
#Le paso los datos a cada modelo hijo.
#for ventaProd in listasDatos:
# ventaProd.setPrecioUnitario(fecha)
# ventaProd.totVentas()
# ventaProd.cantNeta()
#ventaProd.calcular()
#print('::::', len(self.venta.colVentasProd))
#for ventaProd in self.venta.colVentasProd:
# print(':', ventaProd)
listasDatos = self.datosPorCategoria(self.venta.colVentasProd, fecha)
            if len(listasDatos[0]) == 0:  # The sale was created in the Saldos view with no products yet.
self.generarListaProductos(fecha)
else:
#print('ACA')
for modeloHijo, lstDatos in zip(self.lstModelosHijos, listasDatos):
modeloHijo.datos = lstDatos
#modeloHijo.actualizarTotalDatos(fecha)
self.avisarCambioDatos(modeloHijo.categoria)
self.datosNuevos = False
else:
self.crearMovVentaEnBlanco(fecha, vendedor)
self.avisarCambioDatos(Category.VARIOS)
self.avisarCambioDatos(Category.PAN)
self.datosNuevos = True
self.reset()
def cerrarSesionDAO(self):
self.modelo.dao.cerrarSesion()
def showVentana(self, msg):
self.ventana.showVentana(msg)
def buscarDatosByFecha(self, fecha):
#self.mov_venta_prod = self.modelo.dao.movVentaProdByFechaVende({
# 'FECHA_MOV': fecha})
self.venta = self.modelo.dao.ventaByFecha({
'FECHA_MOV': fecha})
def datos2Array(self):
lstDatos = []
for dato in self.modeloVarios.datos:
if dato.tot_venta > 0:
lstDato = [dato.producto.name[0:36],]
for i in range(VentaProd.CANT_CARGAS):
lstDato.append(dato.colCargaProd[i].cant)
lstDato.append(dato.tot_venta)
lstDato.append(dato.devoluciones)
lstDato.append(dato.cant_neta)
lstDato.append(dato.producto.precio)
lstDato.append(dato.costo)
lstDatos.append(lstDato)
lstDatos.append(['TOTAL VENTA VARIOS', None, None, None,
None, None, None, None, None, None,
None, self.modeloVarios.total_ventas])
lst_resaltados = [len(lstDatos) - 1,]
for dato in self.modeloPan.datos:
if dato.costo > 0:
lstDato = [dato.producto.name[0:36],]
for i in range(VentaProd.CANT_CARGAS):
lstDato.append(dato.colCargaProd[i].cant)
lstDato.append(dato.tot_venta)
lstDato.append(dato.devoluciones)
lstDato.append(dato.cant_neta)
lstDato.append(dato.producto.precio)
lstDato.append(dato.costo)
lstDatos.append(lstDato)
lstDatos.append(['TOTAL VENTA PAN', None, None, None,
None, None, None, None, None, None,
None, self.modeloPan.total_ventas])
lstDatos.append(['TOTAL VENTAS', None, None, None,
None, None, None, None, None, None,
None,
self.modeloPan.total_ventas + self.modeloVarios.total_ventas])
lst_resaltados.append(len(lstDatos) - 2)
lst_resaltados.append(len(lstDatos)- 1)
return (variables.LST_HEADER_VENTA_DIARIA, lstDatos, lst_resaltados)
def datos2ArrayBlank(self):
lstDatos = []
for dato in self.modeloPan.datos:
lstDatos.append([dato.__str__(), None])
return (variables.LST_HEADER_VENTA_DIARIA, lstDatos)
def buscarDatosByFechaVendedor(self, mapParam):
#self.mov_venta_prod = self.modelo.dao.\
# movVentaProdByFechaVende(mapParam)
#print('MAP_PARAM:', mapParam['VENDEDOR'].id)
self.venta = self.modelo.dao.ventaByFechaVendedorNotLazy(mapParam)
def datosPorCategoria(self, lstDatosVentaProd, fecha):
u"""Clase que filtra una lista de datos según la categoría."""
lstVarios = []
lstPan = []
for ventaProd in lstDatosVentaProd:
if ventaProd.producto.categoria == Category.VARIOS:
lstVarios.append(ventaProd)
else:
lstPan.append(ventaProd)
ventaProd.setPrecioUnitario(fecha)
ventaProd.totVentas()
ventaProd.cantNeta()
ventaProd.calcular()
return (lstVarios, lstPan)
def crearMovVentaEnBlanco(self, fecha, vendedor):
        # Add the price history.
#self.datos = []
saldo_parcial = 0.0
self.venta = Venta(saldo_parcial, vendedor, fecha)
#self.mov_venta_prod.vendedor = vendedor
self.generarListaProductos(fecha)
def generarListaProductos(self, fecha):
dao = DAOProducto(False)
productosVarios = list(dao.getProductosByCategoria({'categoria': Category.VARIOS,
'activo': True}))
productosPan = list(dao.getProductosByCategoria({'categoria': Category.PAN,
'activo': True}))
for producto in productosVarios:
venta_prod = VentaProd.data2Object([
                producto, 0, 0  # product, initial quantity, returns
])
#venta_prod.venta = self.venta
self.modeloVarios.datos.append(venta_prod)
self.modeloVarios.actualizarPrecioProd(fecha)
#self.modeloVarios.reset()
for producto in productosPan:
venta_prod = VentaProd.data2Object([
                producto, 0, 0  # product, initial quantity, returns
])
#venta_prod.venta = self.venta
self.modeloPan.datos.append(venta_prod)
self.modeloPan.actualizarPrecioProd(fecha)
#self.modeloPan.reset()
#self.mov_venta_prod.colVentasProd = self.modeloVarios.datos + self.modeloPan.datos
#self.venta.colVentasProd = self.modeloVarios.datos + self.modeloPan.datos
def buscarDatosVentas(self, lstDatos):
msg = DataModel.LST_MSG[DataModel.MSG_LIST]
self.buscarDatos(lstDatos)
if self.datosNuevos:
msg = DataModel.LST_MSG[DataModel.MSG_NOT_LIST]
        return (not self.datosNuevos, msg)
def reset(self):
self.modeloVarios.reset()
self.modeloPan.reset()
def avisarCambioDatos(self, categoria):
modeloTabla = self.modeloVarios
if categoria == Category.PAN:
modeloTabla = self.modeloPan
#total = 0.0
#for ventaProd in modeloTabla.datos:
# total = total + ventaProd.costo
self.ventana.avisarCambiosDatos(categoria, modeloTabla.totalVentas())
self.venta.subtotal = 0.0
for modeloTabla in self.lstModelosHijos:
self.venta.subtotal += modeloTabla.total_ventas
#print('SUBTOTAL_VENTAS:', self.venta.subtotal)
def guardarDatos(self, lstDatos):
if self.datosNuevos:
msg = DataModel.LST_MSG[DataModel.MSG_SAVE]
#retorno = self.modelo.dao.insert(self.mov_venta_prod)
self.venta.colVentasProd = self.modeloVarios.datos + self.modeloPan.datos
retorno = self.modelo.dao.insert(self.venta)
if retorno <= 0:
msg = DataModel.LST_MSG[DataModel.MSG_NOT_SAVE]
else:
self.datosNuevos = False
return (retorno > 0, msg)
return self.actualizarDatos(lstDatos)
def actualizarDatos(self, lstDatos):
msg = DataModel.LST_MSG[DataModel.MSG_EDIT]
#print('ACTUALIZAR_SUBTOTAL:', self.venta.subtotal)
#retorno = self.modelo.dao.update(self.mov_venta_prod)
retorno = self.modelo.dao.update(self.venta)
if retorno <= 0:
msg = DataModel.LST_MSG[DataModel.MSG_NOT_EDIT]
return (retorno > 0, msg)
```
#### File: modeltable/modeldatatable/mdtCategory.py
```python
from __future__ import absolute_import, print_function, unicode_literals
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
import sys
sys.path.append('./')
NAME, = range(1)
lstHeader = [NAME,]
lstTitHeader = ['Rubro',]
class ModeloDatosTablaCategory(object):
u""""""
def __init__(self):
pass
def columnCount(self, index=_qc.QModelIndex()):
return 1
def data(self, index, dato, role=_qc.Qt.DisplayRole):
column = index.column()
if role == _qc.Qt.DisplayRole:
if column == NAME:
return _qc.QVariant(dato.name)
elif role == _qc.Qt.TextAlignmentRole:
return _qc.QVariant(int(_qc.Qt.AlignLeft|_qc.Qt.AlignVCenter))
elif role == _qc.Qt.BackgroundColorRole:
row = index.row()
            if row % 2 == 0:  # Even row.
return _qc.QVariant(_qg.QColor(_qc.Qt.darkGray))
#else:
#return QVariant(QColor(Qt.darkBlue))
elif role == _qc.Qt.ToolTipRole:
if column == NAME:
return _qc.QVariant("<font color='#FF0000'>" + dato.name + "</font>")
return _qc.QVariant()
def headerData(self, section, orientation, role=_qc.Qt.DisplayRole):
if role == _qc.Qt.TextAlignmentRole:
if orientation == _qc.Qt.Horizontal:
return _qc.QVariant(int(_qc.Qt.AlignLeft|_qc.Qt.AlignVCenter))
return _qc.QVariant(int(_qc.Qt.AlignRight|_qc.Qt.AlignVCenter))
if role != _qc.Qt.DisplayRole:
return _qc.QVariant()
if orientation == _qc.Qt.Horizontal:
if section in lstHeader:
return _qc.QVariant(lstTitHeader[section])
return _qc.QVariant(int(section + 1))
```
#### File: modeltable/modeldatatable/mdtMateTPVGui.py
```python
from __future__ import absolute_import, print_function, unicode_literals
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
NAME, CODE, PRICE, QUANTITY, SUBTOTAL= range(5)
lstHeader = [NAME, CODE, PRICE, QUANTITY, SUBTOTAL]
lstTitHeader = ['Nombre', u'Código', 'Precio', 'Cantidad', 'Subtotal']
class MDTMateTPVGui(object):
u""""""
def __init__(self):
pass
def columnCount(self, index=_qc.QModelIndex()):
return 5
def data(self, index, dato, role=_qc.Qt.DisplayRole):
column = index.column()
if role == _qc.Qt.DisplayRole:
if column == NAME:
return _qc.QVariant(dato.product.name)
elif column == CODE:
return _qc.QVariant(dato.product.code)
elif column == PRICE:
return _qc.QVariant((_qc.QString("%L1").\
arg('$ {:.2f}'.format(dato.product.price))))
elif column == QUANTITY:
return _qc.QVariant(dato.quantity)
elif column == SUBTOTAL:
return _qc.QVariant((_qc.QString("%L1").\
arg('$ {:.2f}'.format(dato.calculate()))))
elif role == _qc.Qt.TextAlignmentRole:
return _qc.QVariant(int(_qc.Qt.AlignLeft|_qc.Qt.AlignVCenter))
elif role == _qc.Qt.BackgroundColorRole:
row = index.row()
            if row % 2 == 0:  # Even row.
return _qc.QVariant(_qg.QColor(250, 230, 250))
else:
return _qc.QVariant(_qg.QColor(210, 230, 230))
elif role == _qc.Qt.ToolTipRole:
pass
return _qc.QVariant()
def headerData(self, section, orientation, role=_qc.Qt.DisplayRole):
if role == _qc.Qt.TextAlignmentRole:
if orientation == _qc.Qt.Horizontal:
return _qc.QVariant(int(_qc.Qt.AlignLeft|_qc.Qt.AlignVCenter))
return _qc.QVariant(int(_qc.Qt.AlignRight|_qc.Qt.AlignVCenter))
if role != _qc.Qt.DisplayRole:
return _qc.QVariant()
if orientation == _qc.Qt.Horizontal:
if section in lstHeader:
return _qc.QVariant(lstTitHeader[section])
return _qc.QVariant(int(section + 1))
```
#### File: model/modeltable/tableReporteSaldoDiario.py
```python
from imperial.core.model.modelotable.modeloTabla import ModeloTabla
from imperial.core.util.ManejaFechas import ManejaFechas
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
class TableReporteSaldoDiario(ModeloTabla):
def __init__(self, modelo, modeloDatosTabla, parent=None):
        super(TableReporteSaldoDiario, self).__init__(modelo, modeloDatosTabla, parent)  # pass the real parent through
self.modeloDatosTabla.tabla_reporte = self
def buscarDatosSaldos(self, lstDatos):
fecha_busqueda = lstDatos[0]
self.lstSaldos, msg = self.modelo.saldosEntreFechas({'INICIO': fecha_busqueda,
'CIERRE': ManejaFechas.sumarFechasDias(
fecha_busqueda, 6)})
        if not self.lstSaldos:
            print('SIN SALDOS', self.lstSaldos, msg)
            self.clear()
            return (False, msg)  # lstSaldos is empty here; the old `== True` test was always False
self.agruparDatosVentaEgresos()
self.datos = []
print('LEN_SALDOS:', len(self.lstSaldos))
self.lstNombres = [nombre.__str__() for nombre in self.lstSaldos[0].lstDatosAgrupados]
#print('NOMBRES:', lstNombres)
#VENTAS
self.cant_ventas = len(self.lstSaldos[0].colTotalVentas)
print('LEN_SALDOS:', len(self.lstSaldos))
#Crear totales de mov_venta_prod
for saldo in self.lstSaldos:
print('LEN_SALDOS_FOR_TOT:', len(self.lstSaldos))
for mov_venta in saldo.colMovVentasProd:
#uAtributo creado Ad Hoc para mantener compatibilidad en el evento data()
mov_venta.valor = mov_venta.calcularSaldo()
saldo.saldoVentas()
saldo.saldoMovVentasProd()
for i in xrange(self.cant_ventas):
rowDatos = []
rowDatos.append(self.lstSaldos[0].colTotalVentas[i].__str__())
print(self.lstSaldos[0].colTotalVentas[i].__str__())
#print('LEN_SALDOS_XRANGE:', len(self.lstSaldos))
for saldo in self.lstSaldos:
#print('LEN_SALDOS_EN_SALDOS:', len(self.lstSaldos))
#print('SALDOOO:', saldo)
if saldo:
rowDatos.append(saldo.colTotalVentas[i].valor)
else:
rowDatos.append('')
#print('------VALOR:', saldo.colTotalVentas[i].valor)
rowDatos.append(self.totFila(rowDatos[1:]))
self.datos.append(rowDatos)
rowDatos = ['TOTAL VENTAS',]
rowDatos += [saldo.saldoTotalVentas() for saldo in self.lstSaldos]
rowDatos.append(self.totFila(rowDatos[1:]))
self.datos.append(rowDatos)
#EGRESOS
self.cant_egresos = len(self.lstSaldos[0].colMovEgresos)
for i in xrange(self.cant_egresos):
rowDatos = []
rowDatos.append(self.lstSaldos[0].colMovEgresos[i].__str__())
for saldo in self.lstSaldos:
if saldo:
rowDatos.append(saldo.colMovEgresos[i].valor)
else:
rowDatos.append('')
rowDatos.append(self.totFila(rowDatos[1:]))
self.datos.append(rowDatos)
rowDatos = ['TOTAL EGRESOS',]
rowDatos += [saldo.saldoEgresos() for saldo in self.lstSaldos]
rowDatos.append(self.totFila(rowDatos[1:]))
self.datos.append(rowDatos)
rowDatos = ['TOTAL NETO',]
rowDatos += [saldo.saldo() for saldo in self.lstSaldos]
rowDatos.append(self.totFila(rowDatos[1:]))
self.datos.append(rowDatos)
###########
#self.modeloDatosTabla.datos = self.datos
#print self.datos
print(self.datos)
self.reset()
def cambiarNombresHeader(self):
        # Method that renames the table columns.
pass
def agruparDatosVentaEgresos(self):
for saldo_diario in self.lstSaldos:
saldo_diario.colTotalVentas = saldo_diario.colMovVentasProd + saldo_diario.colVentas
saldo_diario.lstDatosAgrupados = saldo_diario.colTotalVentas + \
saldo_diario.colMovEgresos
def totFila(self, lstValores):
tot = 0.0
for valor in lstValores:
tot += valor
return tot
def totColumnas(self):
lstTotales = []
fin = len(self.lstNombres)
print ()
for col in range(1, 9):
tot = 0.0
for i in range(0, len(self.datos)):
print(i, col)
tot += self.datos[i][col]
lstTotales.append(tot)
return lstTotales
```
#### File: gui/dlg/NewCategoryDlg.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.model.models import Category
from imperial.vista.gui.dlg.ui_dlgNuevoRubro import Ui_DlgNuevoRubro
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
from imperial.dao.DAOCategory import DAOCategory
MAC = hasattr(_qg, "qt_mac_set_native_menubar")
class NewCategoryDlg(_qg.QDialog, Ui_DlgNuevoRubro):
u"""Diálogo para agregar un nuevo rubro."""
def __init__(self, modelo=None, parent=None):
super(NewCategoryDlg, self).__init__(parent)
        # Create SIGNAL-SLOT connections between the form widgets and methods of our subclass.
self.setupUi(self)
self.modelo = modelo
self.buttonBox.button(_qg.QDialogButtonBox.Ok).setText("&Agregar")
self.buttonBox.button(_qg.QDialogButtonBox.Cancel).setText("&Cancelar")
if not MAC:
self.buttonBox.button(_qg.QDialogButtonBox.Ok).setFocusPolicy(_qc.Qt.NoFocus)
self.buttonBox.button(_qg.QDialogButtonBox.Cancel).setFocusPolicy(_qc.Qt.NoFocus)
self.updateUi()
@_qc.pyqtSlot("QString")
def on_lineRubro_textEdited(self, text):
self.updateUi()
def data(self):
return [unicode(self.lineRubro.text()),]
def accept(self):
if self.modelo:
self.guardarDatos()
else:
_qg.QDialog.accept(self)
def guardarDatos(self):
tit = u"Nuevo Rubro"
ok, msg = self.modelo.guardarDatos(self.data())
if ok:
_qg.QMessageBox.information(self, tit, msg)
self.buttonBox.button(_qg.QDialogButtonBox.Ok).setEnabled(False)
self.resetValues()
else:
_qg.QMessageBox.information(self, tit, msg)
def resetValues(self):
self.lineRubro.clear()
def updateUi(self):
enable = not (self.lineRubro.text().isEmpty())
self.buttonBox.button(_qg.QDialogButtonBox.Ok).setEnabled(enable)
```
#### File: gui/dlg/ResumenFiscalDlg.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from imperial.core.gui.filterReturn import FilterReturn
from imperial.core.util import printer
from imperial.core.util.ManejaFechas import ManejaFechas
from imperial.model.informes.EditCalc import EditCalcResumenFiscal
from imperial.model.modeltable import tableModelResumenFiscal
from imperial.model.modeltable.tableModelResumenFiscal import \
ResumenFiscalDelegate
from imperial.vista.gui.dlg.ui_dlgResumenFiscal import Ui_DlgResumenFiscal
import PyQt4.QtCore as _qc
import PyQt4.QtGui as _qg
import datetime
import os
import tempfile
MAC = hasattr(_qg, "qt_mac_set_native_menubar")
class ResumenFiscalDlg(_qg.QDialog, Ui_DlgResumenFiscal):
u"""Diálogo."""
def __init__(self, modelo=None, parent=None):
super(ResumenFiscalDlg, self).__init__(parent)
        # Create SIGNAL-SLOT connections between the form widgets and methods of our subclass.
self.setupUi(self)
self.modelo = modelo
self.resumenTableView.setModel(modelo)
self.resumenTableView.setItemDelegate(ResumenFiscalDelegate(self))
self.modelo.tableView = self.resumenTableView
newIndex = self.resumenTableView.model().index(0, 1)
#self.saldosTableView.selectionModel().select(newIndex, _qc.QItemSelectionModel.Select)
self.resumenTableView.setCurrentIndex(newIndex)
if not MAC:
self.guardarPushButton.setFocusPolicy(_qc.Qt.NoFocus)
self.cerrarPushButton.setFocusPolicy(_qc.Qt.NoFocus)
self.fechaDateEdit.setDate(datetime.date.today())
self.filterReturn = FilterReturn()
self.resumenTableView.installEventFilter(self.filterReturn)
self.connect(self.resumenTableView, _qc.SIGNAL("enterPressed()"),
self.cambiarFila)
self.fechaDateEdit.installEventFilter(self.filterReturn)
self.connect(self.fechaDateEdit, _qc.SIGNAL("enterPressed()"),
self.resumenTableView.setFocus)
self.guardarPushButton.clicked.connect(self.guardarDatos)
self.fechaDateEdit.dateChanged.connect(self.buscarDatos)
self.resumenTableView.horizontalHeader().setStretchLastSection(True)
vHeaderResumen = self.resumenTableView.verticalHeader()
vHeaderResumen.setResizeMode(_qg.QHeaderView.Fixed)
vHeaderResumen.setDefaultSectionSize(24)
self.setMinimumSize(600, 550)
self.fechaDateEdit.setFocus()
self.updateUi()
def keyPressEvent(self, e):
if e.key() == _qc.Qt.Key_Escape:
e.ignore()
else:
_qg.QDialog.keyPressEvent(self, e)
#e.accept()
def cambiarFila(self):
index = self.resumenTableView.currentIndex()
next_index = self.resumenTableView.model().index(index.row() + 1, index.column())
if index.row() + 1 >= self.modelo.rowCount():
next_index = self.resumenTableView.model().index(0, index.column())
self.resumenTableView.setCurrentIndex(next_index)
@_qc.pyqtSlot()
def on_cerrarPushButton_clicked(self):
self.parentWidget().close()
_qg.QDialog.accept(self)
@_qc.pyqtSlot()
def on_resumenPushButton_clicked(self):
self.generarReporte()
@_qc.pyqtSlot()
def on_resumenBlancoPushButton_clicked(self):
self.generarReporteEnBlanco()
@_qc.pyqtSlot()
def guardarDatos(self):
tit = u"Resumen fiscal"
ok, msg = self.modelo.guardarDatos(self.data())
if ok:
_qg.QMessageBox.information(self, tit, msg)
else:
_qg.QMessageBox.information(self, tit, msg)
@_qc.pyqtSlot("QDate")
def buscarDatos(self, fecha):
tit = u"Resumen Fiscal"
ok, msg = self.modelo.buscarDatosResumenes(self.data())
if not ok:
_qg.QMessageBox.information(self, tit, msg)
def data(self):
return [self.fechaDateEdit.date().toPyDate(),]
def resetValues(self):
self.limpiarTabla()
self.updateUi()
self.fechaDateEdit.setFocus()
def limpiarTabla(self):
self.modelo.limpiarValores()
def resizeColumns(self, columns):
for column in columns:
self.resumenTableView.resizeColumnToContents(column)
def generarReporteEnBlanco(self):
        _qg.QApplication.setOverrideCursor(_qg.QCursor(_qc.Qt.WaitCursor))
        file = tempfile.mktemp(".xls")
        open(file, "w").close()  # create the empty report file up front
#file = os.curdir + variables.PATH_REPORTES + variables.FILE_REPORTE_SALDO
lst_header, array_datos, lst_resaltados = self.modelo.datos2ArrayBlank()#self.modelo.datos2Array()
edit = EditCalcResumenFiscal(file, array_datos, lst_header)
edit.lst_resaltados = lst_resaltados
edit.title = u'RESUMEN FISCAL | ' + ManejaFechas.date2Str(self.fechaDateEdit.date().toPyDate())
edit.abrirArchivo()
edit.crearHoja()
#edit.setFooter(u'&F | ' + ManejaFechas.date2Str(self.fechaDateEdit.date().toPyDate()))
edit.setFooter(u'')
edit.setWidthCol((16 * 256, 18 * 256, 18 * 256, 18 * 256, 18 * 256))
edit.escribirArchivo()
edit.salvarArchivo()
#os.startfile(edit.fileCalc)
self.imprimirReporte(edit.fileCalc)
_qg.QApplication.restoreOverrideCursor()
def generarReporte(self):
self.resumenPushButton.setEnabled(False)
_qg.QApplication.setOverrideCursor(_qg.QCursor(_qc.Qt.WaitCursor))
file = tempfile.mktemp(".xls")
open(file, "w")
#file = os.curdir + variables.PATH_REPORTES + variables.FILE_REPORTE_SALDO
lst_header, array_datos, lst_resaltados = self.modelo.datos2Array()
edit = EditCalcResumenFiscal(file, array_datos, lst_header)
edit.lst_resaltados = lst_resaltados
edit.title = u'RESUMEN FISCAL | ' + ManejaFechas.date2Str(self.fechaDateEdit.date().toPyDate())
edit.abrirArchivo()
edit.crearHoja()
#edit.setFooter(u'&F | ' + ManejaFechas.date2Str(self.fechaDateEdit.date().toPyDate()))
edit.setFooter(u'')
edit.setWidthCol((16 * 256, 18 * 256, 18 * 256, 18 * 256, 18 * 256))
edit.escribirArchivo()
edit.salvarArchivo()
#os.startfile(edit.fileCalc)
self.imprimirReporte(edit.fileCalc)
_qg.QApplication.restoreOverrideCursor()
self.resumenPushButton.setEnabled(True)
def imprimirReporte(self, file):
#os.startfile(file)
printer.defaultPrinter(file)
def updateUi(self):
enable = True
self.guardarPushButton.setEnabled(enable)
#self.resizeColumns(tableModelResumenFiscal.lstHeader)
``` |
{
"source": "jorgesaw/kmarket",
"score": 3
} |
#### File: locations/tests/test_states.py
```python
from django.test import TestCase
# Models
from apps.locations.models import State
# Utilities
from django.utils.text import slugify
class StateManagerTestCase(TestCase):
"""State manager statetest case."""
def setUp(self):
"""Test case setup."""
self.name_state = 'Santa Fé'
self.state = State.objects.create(name=self.state_name)
    def test_slug_name_create(self):
        """Check that slug_name is created at save."""
        self.assertIsNotNone(self.state.slug_name)
    def test_slug_name_by_default(self):
        """Check how slug_name is built by default."""
        slug_name = slugify(self.state_name)
        self.assertEqual(slug_name, self.state.slug_name)
    def test_state_inactive_at_delete(self):
        """Check the state is flagged inactive at delete."""
        id_state = self.state.id
        self.state.delete()
        state = State.objects.get(pk=id_state)
        self.assertFalse(state.active)
``` |
{
"source": "jorgesaw/kmarket_old",
"score": 2
} |
#### File: kmarket_old/backups/actions.py
```python
def load_restore_data(model_admin, request, queryset):
if queryset.count() > 0:
obj = queryset[0]
return obj.provider_bck.restore_bck()
return False
load_restore_data.short_description = 'Restaurar datos'
```
#### File: kmarket_old/payments/admin.py
```python
from django.contrib import admin
from .models import Bank, CardEntity, CreditCard, DebitCard, Card
# Register your models here.
#admin.site.register(Card)
#admin.site.register(DebitCard)
#admin.site.register(CreditCard)
class BankAdmin(admin.ModelAdmin):
fields = ('name',)
list_display = ('name',)
search_fields = ('name',)
admin.site.register(Bank, BankAdmin)
class CardEntityAdmin(admin.ModelAdmin):
fields = ('name',)
list_display = ('name',)
search_fields = ('name',)
admin.site.register(CardEntity, CardEntityAdmin)
class CardAdmin(admin.ModelAdmin):
fields = ('card_entity', 'bank')
list_select_related = ('card_entity', 'bank')
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def has_change_permission(self, request, obj=None):
return False
admin.site.register(Card, CardAdmin)
class DebitCardAdmin(admin.ModelAdmin):
fields = ('card_entity', 'bank')
list_select_related = ('card_entity', 'bank')
admin.site.register(DebitCard, DebitCardAdmin)
class CreditCardAdmin(admin.ModelAdmin):
fields = ('card_entity', 'bank')
list_select_related = ('card_entity', 'bank')
admin.site.register(CreditCard, CreditCardAdmin)
```
#### File: kmarket_old/payments/models.py
```python
from django.db import models
from sales.models import Sale
from utils.models_mixin import StatusCreatedUpdatedModelMixin
# Create your models here.
class Bank(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a bank."""
name = models.CharField(max_length=255, verbose_name="Nombre")
class Meta:
ordering = ['-name']
verbose_name = "banco"
verbose_name_plural = "banco"
def __str__(self):
return self.name
class CardEntity(StatusCreatedUpdatedModelMixin, models.Model):
""""Model representing at Card Entity"""
name = models.CharField(max_length=50, verbose_name="Tarjeta")
class Meta:
verbose_name = "entidad de tarjeta"
verbose_name_plural = "entidad de tarjetas"
def __str__(self):
return self.name
class Card(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a apyment of cash."""
name = models.CharField(max_length=25, verbose_name="Tipo")
card_entity = models.ForeignKey(CardEntity, on_delete=models.CASCADE, verbose_name='Entidad')
bank = models.ForeignKey(Bank, on_delete=models.CASCADE, verbose_name='Banco')
class Meta:
verbose_name = "tarjeta"
verbose_name_plural = "tarjetas"
def save(self, *args, **kwargs):
super(Card, self).save(*args, **kwargs)
payment_card = CardPayment(card=self)
payment_card.save()
def __str__(self):
return 'Tarjeta: {} {} {}'.format(self.name, self.card_entity, self.bank)
class CreditCard(Card):
"""Model representing a payment of card."""
class Meta:
verbose_name = "tarjeta de crédito"
verbose_name_plural = "tarjeta de créditos"
def save(self, *args, **kwargs):
self.name = "CREDITO"
super(CreditCard, self).save(*args, **kwargs)
def __str__(self):
return 'Tarjeta CREDITO - {}'.format(super(CreditCard, self).bank)
class DebitCard(Card):
"""Model representing a payment of card."""
class Meta:
verbose_name = "tarjeta de débito"
verbose_name_plural = "tarjeta de débitos"
def save(self, *args, **kwargs):
self.name = "DEBITO"
super(DebitCard, self).save(*args, **kwargs)
def __str__(self):
return 'Tarjeta DEBITO - {}'.format(super(DebitCard, self).bank)
class PaymentType(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a payment type."""
class Meta:
ordering = ['id']
verbose_name = "forma de pago"
verbose_name_plural = "forma de pagos"
    @property
    def display_subclass_str(self):
        for attr in ('cardpayment', 'cashpayment', 'currentpayment'):
            if hasattr(self, attr):
                return getattr(self, attr).__str__()  # same as e.g. self.cardpayment.__str__()
        return None
    def __str__(self):
        return self.display_subclass_str or 'Forma de pago'
PESOS_CURRENCY = '$ ARG'
DEFAULT_CURRENCY_CHOICE = PESOS_CURRENCY
CURRENCY_CHOICES = (
(PESOS_CURRENCY, PESOS_CURRENCY),
)
class CashPayment(PaymentType):
"""Model representing a apyment of cash."""
currency = models.CharField(max_length=10, choices=CURRENCY_CHOICES, default=DEFAULT_CURRENCY_CHOICE, verbose_name='Moneda')
class Meta:
verbose_name = "efectivo"
verbose_name_plural = "efectivos"
def __str__(self):
return 'Efectivo: {}'.format(super(Cash, self).amount)
class CurrentPayment(PaymentType):
"""Model representing a current account."""
paid_out = models.BooleanField(default=False, verbose_name='Cuenta Corriente')
class Meta:
verbose_name = "cuenta corriente"
verbose_name_plural = "cuentas corrientes"
def __str__(self):
return 'Cuenta Corriente'
class CardPayment(PaymentType):
"""Model representing a current account."""
card = models.ForeignKey(Card, on_delete=models.CASCADE, verbose_name='Tarjeta')
class Meta:
verbose_name = "tarjeta"
verbose_name_plural = "tarjetas"
def __str__(self):
return self.card.__str__()
class Payment(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a payment."""
amount = models.DecimalField(default=0.0, max_digits=10, decimal_places=2, verbose_name="Importe")
payment_type = models.ForeignKey(PaymentType, on_delete=models.CASCADE, verbose_name='Forma de pago')
sale = models.ForeignKey(Sale, on_delete=models.CASCADE, verbose_name='Venta')
class Meta:
ordering = ['id']
verbose_name = "pago"
verbose_name_plural = "pagos"
``` |
{
"source": "jorgesaw/kstore",
"score": 2
} |
#### File: inventories/forms/categories.py
```python
from django import forms
# Models
from apps.inventories.models import Category
class CategoryForm(forms.ModelForm):
"""Category form."""
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
self.fields['active'].disabled = True
class Meta:
"""Meta class."""
model = Category
fields = ['name', 'active']
widgets = {
'name': forms.TextInput(attrs={'class': 'form-control', 'autofocus': 'autofocus'}),
'active': forms.CheckboxInput(attrs={'class': 'custom-control-input'})
}
```
#### File: inventories/models/products.py
```python
from django.db import models
from django.urls import reverse_lazy
from django.db.models.signals import pre_save
# Models
from .subcategories import SubCategory
# Utilities
from apps.utils.images import custom_upload_to
from apps.utils.models import BaseModelWithSlugName
from apps.utils.text import pre_save_receiver_slug_name
class AbstractProductWithoutPrice(BaseModelWithSlugName):
"""Abstract product with price.
Model representing a product.
"""
code = models.CharField(max_length=30, unique=True, null=True, blank=True, verbose_name="Código interno")
barcode = models.CharField(max_length=30, unique=True, verbose_name="Código de barras")
name = models.CharField(max_length=210, verbose_name="Nombre")
desc = models.CharField(max_length=255, verbose_name="Descripción")
stock = models.PositiveIntegerField(default=0, verbose_name="Stock")
stock_min = models.PositiveIntegerField(default=0, verbose_name="Stock mínimo")
stock_max = models.PositiveIntegerField(default=0, blank=True, verbose_name="Stock máximo")
picture = models.ImageField(upload_to=custom_upload_to, blank=True, verbose_name="Imagen")
subcategory = models.ForeignKey(SubCategory, on_delete=models.SET_NULL, null=True, verbose_name='Subcategoria')
class Meta:
"""Meta class."""
abstract = True
ordering = ['name']
verbose_name = "producto"
verbose_name_plural = "productos"
def get_absolute_url(self):
"""Returns the url to access a particular product instance."""
return reverse_lazy('inventories:product', args=[str(self.slug_name),])
def __str__(self):
return '{}'.format(self.name)
class AbstractProduct(AbstractProductWithoutPrice):
price = models.DecimalField(max_digits=10, decimal_places=3, verbose_name="Precio")
class Meta:
"""Meta class."""
abstract = True
class Product(AbstractProduct):
"""Product class."""
def __str__(self):
return '{} - {:.2f}'.format(self.name, self.price)
pre_save.connect(pre_save_receiver_slug_name, sender=Product)
class ProductWithoutPrice(AbstractProductWithoutPrice):
"""Product without price."""
class Meta:
"""Meta class."""
verbose_name = "producto"
verbose_name_plural = "productos"
pre_save.connect(pre_save_receiver_slug_name, sender=ProductWithoutPrice)
class ProductProxi(Product):
"""Product proxi."""
class Meta:
"""Meta class."""
proxy = True
verbose_name_plural = "Stocks de productos"
class ProductWithoutPriceProxi(ProductWithoutPrice):
"""Product without price proxi."""
class Meta:
"""Meta class."""
proxy = True
verbose_name_plural = "Stocks de productos sin precios"
```
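The two proxy models above only change presentation; a minimal sketch of registering them in the Django admin (the `ModelAdmin` options are assumptions, not taken from the repo):
```python
from django.contrib import admin
from apps.inventories.models import Product, ProductProxi

@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
    list_display = ('name', 'price', 'stock')

@admin.register(ProductProxi)
class ProductStockAdmin(admin.ModelAdmin):
    # A second, stock-focused changelist backed by the same table.
    list_display = ('name', 'stock', 'stock_min', 'stock_max')
```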
#### File: inventories/views/categories.py
```python
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse, reverse_lazy
from django.shortcuts import redirect
from django.contrib.admin.views.decorators import staff_member_required
from django.utils.decorators import method_decorator
from django.conf import settings
from django.shortcuts import get_object_or_404
# Messages
from django.contrib.messages.views import SuccessMessageMixin
# Filters
from apps.inventories.filters import CategoryFilter
# Filters view
from utils.views.filters import FilteredListView
# Forms
from apps.inventories.forms import CategoryForm
# Models
from apps.inventories.models import Category
# Views
from apps.utils.views_mixin import (
ViewBaseMixin,
ViewListByStatusMixin
)
class CategoryListView(FilteredListView):
"""Category list view."""
model = Category
paginate_by = 25
filterset_class = CategoryFilter
class CategoryDetailView(ViewBaseMixin, DetailView):
"""Category detail view."""
model = Category
@method_decorator(staff_member_required(login_url=settings.LOGIN_URL), name='dispatch')
class CategoryCreate(SuccessMessageMixin, CreateView):
"""Category create."""
model = Category
form_class = CategoryForm
success_url = reverse_lazy('inventories:categories')
success_message = "Datos creados exitosamente."
@method_decorator(staff_member_required(login_url=settings.LOGIN_URL), name='dispatch')
class CategoryUpdate(ViewBaseMixin, SuccessMessageMixin, UpdateView):
"""Category update."""
model = Category
form_class = CategoryForm
template_name_suffix = "_update_form"
success_message = "Datos actualizados exitosamente."
def get_success_url(self):
return reverse_lazy('inventories:category-update', args=[self.object.slug_name]) + "?ok"
@method_decorator(staff_member_required(login_url=settings.LOGIN_URL), name='dispatch')
class CategoryDelete(ViewBaseMixin, SuccessMessageMixin, DeleteView):
"""Category delete."""
model = Category
success_url = reverse_lazy('inventories:categories')
success_message = "Datos eliminados exitosamente."
def get_success_url(self):
return reverse_lazy('inventories:categories') + "?remove"
```
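The `reverse_lazy()` names used by these views imply a URLconf roughly like the sketch below; the path patterns are assumptions, and `ViewBaseMixin` is presumed to resolve objects by `slug_name`:
```python
from django.urls import path
from apps.inventories.views.categories import (
    CategoryListView, CategoryDetailView,
    CategoryCreate, CategoryUpdate, CategoryDelete,
)

app_name = 'inventories'

urlpatterns = [
    path('categories/', CategoryListView.as_view(), name='categories'),
    path('categories/new/', CategoryCreate.as_view(), name='category-new'),
    path('categories/<slug:slug_name>/', CategoryDetailView.as_view(), name='category'),
    path('categories/<slug:slug_name>/edit/', CategoryUpdate.as_view(), name='category-update'),
    path('categories/<slug:slug_name>/delete/', CategoryDelete.as_view(), name='category-delete'),
]
```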
#### File: purchases/models/purchases.py
```python
from decimal import Decimal
# Django
from django.db import models
from django.db.models import Sum, F, FloatField, Max
from django.utils import timezone
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.urls import reverse_lazy
# Models
from apps.utils.models import BaseModel
from apps.purchases.models import Supplier
from apps.users.models import User
TAX_CHOICES = [
("0 %", 0.0),
("21 %", 0.21),
("10.5 %", 0.105),
]
class Purchase(BaseModel):
"""Purchase class."""
# Internal number to send
number_purchase = models.CharField(
max_length=18,
blank=True,
null=True,
verbose_name='Número interno'
)
date_purchase = models.DateField(default=timezone.now, verbose_name='Fecha')
observations = models.TextField(blank=True, null=True, verbose_name='Observaciones')
# Invoice data
invoice_num = models.CharField(
max_length=30,
blank=True,
null=True,
verbose_name='N° de factura'
)
is_fiscal = models.BooleanField(default=True, verbose_name="Es fiscal")
invoice_date = models.DateField(verbose_name='Fecha de la factura de compra')
# Values
discount = models.DecimalField(max_digits=10, decimal_places=2, default=0, verbose_name='Descuento')
tax_choices = models.CharField(
blank=True,
null=True,
        max_length=10,
choices=TAX_CHOICES,
default="0 %",
verbose_name="IVA"
)
tax = models.DecimalField(max_digits=10, decimal_places=2, default=0.0, verbose_name='Impuesto')
subtotal = models.DecimalField(max_digits=10, decimal_places=2, default=0.0)
total = models.DecimalField(max_digits=10, decimal_places=2, default=0.0)
supplier = models.ForeignKey(Supplier, blank=True, null=True, on_delete=models.SET_NULL, verbose_name='Proveedor')
created_by = models.ForeignKey(User, related_name="purchases", blank=True, null=True, on_delete=models.SET_NULL)
class Meta:
ordering = ['-id',]
verbose_name = 'compra'
verbose_name_plural = 'compras'
def get_absolute_url(self):
"""Returns the url to access a particular product instance."""
return reverse_lazy('purchases:purchase-update', kwargs={'pk': self.pk})
def _calculate_subtotal(self):
        # The aggregate returns a dict whose single key is 'subtotal_purchase'.
_subtotal = self.itempurchase_set.all().aggregate(
subtotal_purchase=Sum( ( F('quantity') * F('price') ) - F('discount'), output_field=FloatField() )
)['subtotal_purchase'] or 0
self.subtotal = _subtotal
def calculate_total(self):
self._calculate_subtotal()
_total = float(self.subtotal) - float(self.discount) + float(self.tax)
self.total = Decimal.from_float(_total)
Purchase.objects.filter(id=self.id).update(subtotal=self.subtotal, total=_total)
def __str__(self):
return self.number_purchase
@receiver(post_save, sender=Purchase)
def update_sales_total(sender, instance, **kwargs):
instance.calculate_total()
```
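`_calculate_subtotal()` aggregates over `itempurchase_set`, so the app must define an `ItemPurchase` model; the sketch below reconstructs only the fields the `F()` expressions touch, everything else is an assumption:
```python
from django.db import models

class ItemPurchase(models.Model):
    """Hypothetical line item, inferred from the aggregate above."""
    quantity = models.PositiveIntegerField(default=1)
    price = models.DecimalField(max_digits=10, decimal_places=2, default=0)
    discount = models.DecimalField(max_digits=10, decimal_places=2, default=0)
    purchase = models.ForeignKey('Purchase', on_delete=models.CASCADE)
```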
#### File: registration/views/registration.py
```python
from django.shortcuts import render
from django.views.generic import CreateView
from django.views.generic.edit import UpdateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.urls import reverse_lazy
# Forms
from django import forms
from apps.registration.forms import (
UserCreationWithEmail,
ProfileForm,
EmailForm
)
from django.contrib.messages.views import SuccessMessageMixin
# Models
#from apps.registration.models import Profile
from apps.users.models import Profile
class SignUpView(CreateView):
"""Sign up view."""
form_class = UserCreationWithEmail
template_name = 'registration/signup.html'
def get_success_url(self):
return reverse_lazy('login') + '?register'
def get_form(self, form_class=None):
"""
Override in exec time.
"""
form = super(SignUpView, self).get_form()
# Modify at real time.
form.fields['username'].widget = forms.TextInput(
attrs={'class': 'form-control', 'placeholder': 'Nombre de usuario'}
)
form.fields['email'].widget = forms.EmailInput(
            attrs={'class': 'form-control', 'placeholder': 'Dirección de e-mail'}
)
form.fields['password1'].widget = forms.PasswordInput(
attrs={'class': 'form-control', 'placeholder': '<PASSWORD>aseña'}
)
form.fields['password2'].widget = forms.PasswordInput(
attrs={'class': 'form-control mb-2', 'placeholder': 'Repetir contraseña'}
)
        form.fields['first_name'].widget = forms.TextInput(
            attrs={'class': 'form-control mb-2', 'placeholder': 'Nombre'}
        )
        form.fields['last_name'].widget = forms.TextInput(
            attrs={'class': 'form-control mb-2', 'placeholder': 'Apellido'}
        )
return form
@method_decorator(login_required, name='dispatch')
class ProfileUpdate(SuccessMessageMixin, UpdateView):
"""Profile update view."""
form_class = ProfileForm
success_url = reverse_lazy('registration:profile')
template_name = 'registration/profile_form.html'
success_message = "Datos de perfil actualizados exitosamente."
def get_object(self):
        # Fetch the object being edited (create the profile on first access).
profile, created = Profile.objects.get_or_create(user=self.request.user)
return profile
@method_decorator(login_required, name='dispatch')
class EmailUpdate(SuccessMessageMixin, UpdateView):
"""Email update view."""
form_class = EmailForm
template_name = 'registration/profile_email_form.html'
success_message = "Email actualizado exitosamente."
def get_success_url(self):
#from django.contrib import messages
#messages.success(self.request, 'Email actualizado')
return reverse_lazy('registration:profile') #+ '?change_email'
def get_object(self):
        # Fetch the object being edited.
return self.request.user
def get_form(self, form_class=None):
"""
Override in exec time.
"""
form = super(EmailUpdate, self).get_form()
# Modify at real time.
form.fields['email'].widget = forms.EmailInput(
            attrs={'class': 'form-control', 'placeholder': 'Dirección de e-mail', 'id': 'inputEmail'}
)
return form
``` |
{
"source": "jorgesaw/mate_tpv",
"score": 3
} |
#### File: core/util/RecursosCSS.py
```python
from __future__ import absolute_import, print_function, unicode_literals
class RecursosCSS(object):
CSS = ''
@staticmethod
def cargarRecursoCss(file):
with open(file, 'r') as fh:
RecursosCSS.CSS = fh.read()
```
#### File: core/dao/alchemyUtil.py
```python
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from contextlib import contextmanager
import core.dao.config as config
u"""Módulo factoría para crear una sesión en SQLAlchemy.
:author: <NAME>
:version: 1.0.0"""
__docformat__ = "reestructuredtext"
__version__ = "1.0.0"
cadenaConexion = config.cargarConfig()
engine = create_engine(cadenaConexion, echo=True)
#Session = sessionmaker(bind=engine)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
def session():
u"""Clase que crea una sesión para realizar las transacciones."""
session = Session()
return session
def conn():
u"""Clase que crea una conexión para realizar las transacciones."""
conn = engine.connect()
return conn
@contextmanager
def session_scope():
"""Provide a transactional scope around a series of operations."""
session = Session()
try:
yield session
session.commit()
except:
session.rollback()
raise
finally:
session.close()
#with session_scope() as session:
# print(session)
```
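The commented-out lines at the bottom hint at the intended call pattern; a short usage sketch (`Product` stands in for any mapped class):
```python
from core.dao.alchemyUtil import session_scope

def rename_product(product_id, new_name):
    with session_scope() as session:
        product = session.query(Product).get(product_id)  # Product: any mapped class
        product.name = new_name
        # commit, rollback and close are all handled by the context manager
```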
#### File: model/dto/__init__.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from mate.core.util import util
from mate.vista import factoria
PATH_MODELS = 'mate.model.dto.'
MODEL_NAMES = {factoria.ARTICLE_NEW: 'Article',
factoria.ARTICLE_SHOW: 'Article'}
IDX_ARTICLE = range(1)
def getClaseModelo(tipo):
return util.import_class(PATH_MODELS + MODEL_NAMES[tipo])
```
#### File: model/mapper/mapeador.py
```python
from __future__ import absolute_import, print_function, unicode_literals
from mate.model.models import Bill, Category, Item, Product, Licencia, TypePay
from sqlalchemy import Column, Integer, String, Float, Date, Boolean, Time, \
ForeignKey, Table, MetaData, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import mapper, relationship, backref
meta = MetaData()
categoriaTabla = Table('CATEGORIES', meta,
Column('id_category', Integer, primary_key=True),
Column('name', String(50), nullable=False),
mysql_engine='InnoDB'
)
mapper(Category, categoriaTabla, properties={
'id': categoriaTabla.c.id_category,
'name': categoriaTabla.c.name,
'products': relationship(Product,
lazy="dynamic",
cascade="all, delete-orphan")
})
productoTabla = Table("PRODUCTS", meta,
Column('id_product', Integer, primary_key=True),
Column('code', String(13), unique=True),
Column('name', String(50), nullable=False),
Column('external_code', String(25), nullable=True, unique=True),
Column('price', Float, nullable=False),
Column('buy_price', Float, nullable=False),
Column('stock', Integer, nullable=False),
Column('min_stock', Integer, nullable=False),
Column('ideal_stock', Integer, nullable=False),
Column('description', String(50), nullable=True),
Column('id_category', Integer,
ForeignKey('CATEGORIES.id_category', ondelete="CASCADE", onupdate="CASCADE")),
mysql_engine='InnoDB'
)
mapper(Product, productoTabla, properties={
'id': productoTabla.c.id_product,
'code': productoTabla.c.code,
'name': productoTabla.c.name,
'external_code': productoTabla.c.external_code,
'price': productoTabla.c.price,
'buy_price': productoTabla.c.buy_price,
'stock': productoTabla.c.stock,
'min_stock': productoTabla.c.min_stock,
'ideal_stock': productoTabla.c.ideal_stock,
'description': productoTabla.c.description,
'category': relationship(Category,
backref='CATEGORIES',
lazy="joined")
})
itemTabla = Table("ITEMS", meta,
Column('id', Integer, primary_key=True),
Column('price', Float, nullable=False),
Column('unit_price', Float, nullable=False),
Column('quantity', Integer, nullable=False),
Column('code_prod', Integer, ForeignKey('PRODUCTS.code')),
Column('number', Integer, ForeignKey('BILLS.number')),
mysql_engine='InnoDB'
)
mapper(Item, itemTabla, properties={
'id': itemTabla.c.id,
'price': itemTabla.c.price,
'unit_price': itemTabla.c.unit_price,
'quantity': itemTabla.c.quantity,
'product': relationship(Product,
uselist=False),
'bill': relationship(Bill,
backref='BILLS',
lazy="joined"),
})
billTabla = Table("BILLS", meta,
#Column('id', Integer, primary_key=True),
#Column('number', Integer, unique=True),
Column('number', Integer, primary_key=True),
Column('date', Date, nullable=False),
Column('subtotal', Float),
Column('total', Float),
mysql_engine='InnoDB'
)
mapper(Bill, billTabla, properties={
#'id': billTabla.c.id,
'id': billTabla.c.number,
#'number': billTabla.c.number,
'date': billTabla.c.date,
'subtotal': billTabla.c.subtotal,
'total': billTabla.c.total,
'items': relationship(Item,
lazy="dynamic",
cascade="all, delete-orphan")
})
licenciaTabla = Table("LICENCIAS", meta,
Column('id', Integer, primary_key=True),
Column('cant', Integer, nullable=False),
mysql_engine='InnoDB'
)
mapper(Licencia, licenciaTabla, properties={
'id': licenciaTabla.c.id,
'cant': licenciaTabla.c.cant,
})
def main():
import mate.core.dao.config as config
engine = create_engine(config.cargarConfig())
meta.drop_all(engine)
meta.create_all(engine)
print(u"Se crearon las tablas con éxito")
if __name__ == '__main__':
main()
```
#### File: mate/model/modelTPV.py
```python
from __future__ import absolute_import, unicode_literals, print_function
from mate.core.model.datamodel.dataModel import DataModel
from mate.core.model.genericmodel.genericModel import Model
from mate.model.models import Bill, Item, Product
class ModelTPV(Model):
    u"""Point-of-sale model that manages the current bill."""
def __init__(self, dao=None, dataClase=None, modeloDatos=None):
super(ModelTPV, self).__init__(dao, dataClase, modeloDatos)
self.bill = None
def nuevaVenta(self):
self.bill = Bill()
def cerrarVenta(self):
#self.bill.close()
#self.bill, msg = Model.guardarDato(self, self.bill)
        # Work on a copy to avoid touching product stock
        # if the bill cannot be saved.
bill = self.bill.copy()
bill.close()
#self.bill, msg = Model.guardarDato(self, self.bill)
bill, msg = Model.guardarDato(self, bill)
        if bill:  # Saved successfully.
self.bill = bill
return (self.bill, msg)
def guardarDatos(self, lstDatos):
msg = DataModel.LST_MSG[DataModel.MSG_NOT_GET_DATO]
item = None
#uBusco el producto en la DB.
prod = self.dao.get(lstDatos[1], Product) #uCódigo del producto.
if prod:
msg = DataModel.LST_MSG[DataModel.MSG_GET_DATO]
#uSi existe el producto creamos un ítem.
item = Item(prod, lstDatos[0])
self.bill.addItem(item)
return (item, msg)
def totalVenta(self):
self.bill.calculate()
return self.bill.total
```
#### File: mate/test/prueba_prod.py
```python
class Producto():
def __init__(self, cod, nombre):
self.cod = cod
self.nombre = nombre
def __str__(self):
return '{0}-{1}'.format(self.cod, self.nombre)
@staticmethod
def data2Object(lstDatos):
prod = Producto(lstDatos[0], lstDatos[1])
return prod
@staticmethod
def object2Data(prod):
return [prod.cod, prod.nombre]
@staticmethod
def editObject(prod, lstDatos):
prod.cod = lstDatos[0]
prod.nombre = lstDatos[1]
clase_prod = Producto
p1 = clase_prod.data2Object([1, "Yerba Amanda"])
print(p1)
print(Producto.object2Data(p1))
```
#### File: gui/crud/crudProductDlg.py
```python
from __future__ import absolute_import, print_function, unicode_literals
import PyQt4.QtCore as _qc
from mate.core.vista.crud.CRUD import CRUD
from mate.vista.gui.dlg.NewProductDlg import NewProductDlg
from mate.vista.gui.dlg.EditProductDlg import EditProductDlg
class CrudProductDlg(CRUD):
u"""Dialogo CRUD de productos."""
def __init__(self, modelo, parent=None):
super(CrudProductDlg, self).__init__(modelo, parent)
#self.datosTableView.setItemDelegate(CiudadesDelegate())
self.updateUi()
self.datosTableView.setStyleSheet('background-color: rgb(156, 156, 156);')
#CRUD.resizeColumns(self, (ModeloDatosCiudad.NOMBRE, ModeloDatosCiudad.COD_POSTAL,
#ModeloDatosCiudad.DDN, ModeloDatosCiudad.PROVINCIA))
self.setWindowTitle("CRUD Productos")
@_qc.pyqtSlot()
def on_newPushButton_clicked(self):
addDlg = NewProductDlg()
CRUD.addData(self, addDlg)
@_qc.pyqtSlot()
def on_editPushButton_clicked(self):
editDlg = EditProductDlg()
CRUD.editData(self, editDlg)
@_qc.pyqtSlot()
def on_listPushButton_clicked(self):
CRUD.listData(self)
@_qc.pyqtSlot()
def on_searchPushButton_clicked(self):
pass#CRUD.searchData(self, BusquedaGui.INSTIT)
def updateUi(self):
CRUD.updateUi(self)
``` |
{
"source": "jorgesaw/oclock",
"score": 2
} |
#### File: core/tests/tests_core.py
```python
from django.shortcuts import reverse
from django.test import TestCase
from django.urls import resolve
from django.template.loader import render_to_string
from apps.core.views.core import HomePageView
class HomePageTest(TestCase):
def setUp(self):
self.response = self.client.get(reverse('core:home'))
def tearDown(self):
del self.response
def test_view_url_exists_at_desired_location(self):
response = self.client.get('/')
self.assertTrue(response.status_code, 200)
def test_view_url_accessible_by_name(self):
self.assertTrue(self.response.status_code, 200)
"""
def test_view_class_exists(self):
self.assertTrue(
self.response.resolver_match.func.__name__, HomePageView.__name__
)
"""
def test_view_uses_correct_template(self):
self.assertTemplateUsed(self.response, 'core/home.html')
def test_home_page_returns_correct_html(self):
html = self.response.content.decode('utf-8')
self.assertTrue(html.startswith('<!DOCTYPE html>\n<html lang="es">'))
self.assertIn('<title>Inicio | Kstore</title>', html)
self.assertTrue(html.strip().endswith('</html>'))
if __name__ == '__main__':
    import unittest
    unittest.main()
```
#### File: core/views/core.py
```python
from django.shortcuts import render
from django.views.generic import ListView
# Forms
from apps.shows.forms import ShowSearchForm
# Models
from apps.shows.models import Show
def _get_form(request, form_cls, prefix_):
data = request.POST if prefix_ in request.POST else None
return form_cls(data, prefix=prefix_)
class HomePageView(ListView):
"""Home page view."""
model = Show
template_name = "core/home.html"
context_object_name = 'shows_list'
paginate_by = 2
main_search_form = ShowSearchForm
def get_queryset(self):
return Show.objects.all()
def get_context_data(self, **kwargs):
context = super(HomePageView, self).get_context_data(**kwargs)
adds_shows = self.get_queryset()
context['title'] = 'Inicio'
context['adds_shows'] = adds_shows
context['main_search_form'] = self.main_search_form(
prefix='main_search_form'
)
context['secondary_search_form'] = None
return context
def get(self, request, *args, **kwargs):
#shows = Show.objects.all()
# https://stackoverflow.com/questions/4581789/how-do-i-get-user-ip-address-in-django
# https://moonbooks.org/Articles/How-to-get-visitor-ip-address-with-django-/
# x_forwarded_for = self.request.META.get('HTTP_X_FORWARDED_FOR')
# if x_forwarded_for:
# ip = x_forwarded_for.split(',')[0]
# else:
# ip = self.request.META.get('REMOTE_ADDR')
self.object_list = self.get_queryset()
context = self.get_context_data(object_list=self.object_list)
return self.render_to_response(
context
)
def post(self, request, *args, **kwargs):
adds_shows = Show.objects.all()
main_search_form = _get_form(
request,
self.main_search_form, 'main_search_form'
)
#secondary_search_form = _get_form(request, ShowSearchForm, 'secondary_search_form')
if main_search_form.is_bound and main_search_form.is_valid():
city = main_search_form.cleaned_data['city']
event = main_search_form.cleaned_data['event']
name = main_search_form.cleaned_data['name']
shows = Show.objects.filter(
city=city,
event=event,
name__contains=name
)
main_search_form = self.main_search_form(prefix='main_search_form')
else:
shows = Show.objects.all()
return render(
request,
self.template_name,
{
'title': 'Inicio',
# self.context_object_name: shows,
'adds_shows': adds_shows,
'main_search_form': main_search_form,
#'secondary_search_form': secondary_search_form,
}
)
```
#### File: shows/models/images.py
```python
from django.db import models
from django.utils.translation import gettext_lazy as _
# Models
from apps.shows.models import Show
# Utilities
from apps.utils.images import custom_upload_to
class Picture(models.Model):
"""Picture models.
Representing at image by a show.
"""
title = models.CharField(
_('title'),
max_length=50,
null=True,
blank=True
)
image = models.ImageField(
_('image'),
upload_to=custom_upload_to,
null=True,
blank=True,
)
show = models.ForeignKey(Show, on_delete=models.CASCADE)
def __str__(self):
return self.title
```
#### File: socials/forms/links.py
```python
from django import forms
from django.forms import BaseInlineFormSet
from django.forms.models import inlineformset_factory
# Models
from apps.socials.models import UserSocialNetwork
from apps.shows.models import Show
class BaseUserSocialNetworkInlineFormSet(BaseInlineFormSet):
"""Base user social network inline form set."""
def clean(self):
super(BaseUserSocialNetworkInlineFormSet, self).clean()
if any(self.errors):
return
for form in self.forms:
if form.cleaned_data:
username = form.cleaned_data['username']
social = form.cleaned_data['social']
if not username:
raise forms.ValidationError(
'Es obligatorio un nombre de usuario.',
code='without_username'
)
if not social:
raise forms.ValidationError(
'Es obligatorio seleccionar la red social.',
code='without_social_network'
)
class UserSocialNetworkInlineForm(forms.ModelForm):
"""User social network inline form."""
class Meta:
"""Meta class."""
model = UserSocialNetwork
exclude = ('created', 'modified')
UserSocialNetworkInlineFormSet = inlineformset_factory(
Show,
UserSocialNetwork,
form=UserSocialNetworkInlineForm,
formset=BaseUserSocialNetworkInlineFormSet,
fields=['social', 'username'],
extra=1,
can_delete=True
)
```
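A typical call site for the factory-built formset, assuming a `Show` instance is already loaded (template rendering omitted):
```python
def handle_social_links(request, show):
    """Sketch: bind, validate and persist the inline formset."""
    if request.method == 'POST':
        formset = UserSocialNetworkInlineFormSet(request.POST, instance=show)
        if formset.is_valid():
            formset.save()  # persists adds, edits and deletions in one pass
    else:
        formset = UserSocialNetworkInlineFormSet(instance=show)
    return formset  # hand off to the template context
```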
#### File: tests/links/test_models.py
```python
from django.test import TestCase
# Social utils tests
from utils.tests.models.socials import create_link
class LinkModelTest(TestCase):
"""Test link model."""
def test_repr_by_name(self):
link = create_link()
self.assertEqual(str(link), link.username)
if __name__ == '__main__':
    import unittest
    unittest.main()
```
#### File: utils/managers/managers.py
```python
from django.db import models
# Utilities
from django.utils.text import slugify
class SlugNameCreateManager(models.Manager):
"""Slug name create manager.
Used to handle code creation.
"""
def create(self, **kwargs):
"""Add a unique slug name from unique name."""
name = kwargs['name']
kwargs['slug_name'] = slugify(name)
return super(SlugNameCreateManager, self).create(**kwargs)
```
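The manager only takes effect once a model adopts it; a minimal sketch with a made-up `Tag` model whose fields match what `create()` expects:
```python
from django.db import models

class Tag(models.Model):
    name = models.CharField(max_length=100, unique=True)
    slug_name = models.SlugField(max_length=100, blank=True)

    objects = SlugNameCreateManager()

# Tag.objects.create(name='Bebidas frías')  ->  slug_name == 'bebidas-frias'
```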
#### File: utils/reports/init_config_xhtml2pdf.py
```python
import os
# Django
from django.conf import settings
def link_callback(uri, rel):
"""
Convert HTML URIs to absolute system paths so xhtml2pdf can access those
resources
"""
# use short variable names
sUrl = settings.STATIC_URL # Typically /static/
sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/
mUrl = settings.MEDIA_URL # Typically /static/media/
mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/
# convert URIs to absolute system paths
if uri.startswith(mUrl):
path = os.path.join(mRoot, uri.replace(mUrl, ""))
elif uri.startswith(sUrl):
path = os.path.join(sRoot, uri.replace(sUrl, ""))
else:
return uri # handle absolute uri (ie: http://some.tld/foo.png)
# make sure that file exists
if not os.path.isfile(path):
raise Exception(
'media URI must start with %s or %s' % (sUrl, mUrl)
)
return path
```
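A typical xhtml2pdf call site for the callback above; the template name is an assumption:
```python
from django.http import HttpResponse
from django.template.loader import get_template
from xhtml2pdf import pisa

def render_pdf(request):
    html = get_template('reports/report.html').render({})  # assumed template
    response = HttpResponse(content_type='application/pdf')
    status = pisa.CreatePDF(html, dest=response, link_callback=link_callback)
    if status.err:
        return HttpResponse('PDF rendering error', status=500)
    return response
```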
#### File: models/users/users.py
```python
from django.contrib.auth import get_user_model
PASSWORD = '<PASSWORD>!'
def create_user(username='<EMAIL>', password=PASSWORD, is_staff=False):
return get_user_model().objects.create_user(
username=username,
email=username,
first_name='Test',
last_name='User',
password=password,
is_staff=is_staff
)
```
#### File: views/mixins/custom_delete_views.py
```python
from django.http import HttpResponseRedirect
class SoftDeleteViewMixin:
"""Soft delete view mixin.
Set status model at inactive.
"""
    def delete(self, request, *args, **kwargs):
self.object = self.get_object()
success_url = self.get_success_url()
self.object.soft_delete()
return HttpResponseRedirect(success_url)
``` |
{
"source": "jorgesaw/robotkmarket",
"score": 3
} |
#### File: robotkmarket/locations/models.py
```python
from django.db import models
from django.urls import reverse_lazy
from utils.models_mixin import StatusCreatedUpdatedModelMixin
# Create your models here.
class State(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a state."""
name = models.CharField(max_length=210, verbose_name="Nombre")
class Meta:
ordering = ['name']
verbose_name = "provincia"
verbose_name_plural = "provincias"
def get_absolute_url(self):
"""Returns the url to access a particular state instance."""
return ""#reverse_lazy('article-detail', args=[str(self.id)])
def soft_delete(self):
self.active = False
self.save()
def __str__(self):
return '{}'.format(self.name)
class City(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a city."""
name = models.CharField(max_length=50, unique=True, verbose_name="Nombre")
zip_city = models.CharField(max_length=30, null=True, blank=True, verbose_name="Código postal")
ddn = models.CharField(max_length=12, null=True, blank=True, verbose_name="Característica")
state = models.ForeignKey(State, default=1, on_delete=models.SET_NULL, null=True, verbose_name="Provincia")
class Meta:
ordering = ['name']
verbose_name = "ciudad"
verbose_name_plural = "ciudades"
def get_absolute_url(self):
"""Returns the url to access a particular city instance."""
return ""#reverse_lazy('article-detail', args=[str(self.id)])
def soft_delete(self):
self.active = False
self.save()
def __str__(self):
return '{}'.format(self.name)
```
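Both models expose `soft_delete()`, which pairs naturally with the `SoftDeleteViewMixin` shown earlier; a sketch of wiring the two together (the import path and route name are assumptions):
```python
from django.urls import reverse_lazy
from django.views.generic.edit import DeleteView

from locations.models import City
from utils.views_mixin import SoftDeleteViewMixin  # assumed module path

class CityDeleteView(SoftDeleteViewMixin, DeleteView):
    model = City
    success_url = reverse_lazy('cities')  # assumed route name
```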
#### File: robotkmarket/overheads/models.py
```python
from django.db import models
from django.db.models import Sum, F, FloatField
from django.utils import timezone
from django.db.models.signals import post_save, pre_save, pre_delete
from django.dispatch import receiver
from utils.models_mixin import StatusCreatedUpdatedModelMixin
# Create your models here.
class Overhead(StatusCreatedUpdatedModelMixin, models.Model):
"""Model representing a overhead."""
init_cash = models.DecimalField(default=0.0, max_digits=10, decimal_places=2, verbose_name="Caja inicial")
total = models.DecimalField(default=0.0, max_digits=10, decimal_places=2, verbose_name="Total gastos")
remaining_cash = models.DecimalField(default=0.0, max_digits=10, decimal_places=2, verbose_name="Efectivo restante")
date_overhead = models.DateField(default=timezone.now, unique=True, verbose_name="Fecha")
class Meta:
ordering = ['-date_overhead']
verbose_name = "gasto del día"
verbose_name_plural = "gastos del día"
def calculate_total(self):
        tot = self.itemoverhead_set.all().aggregate(
            # The aggregate returns a dict whose single key is 'tot_overhead'.
            tot_overhead=Sum( F('value'), output_field=FloatField() )
        )['tot_overhead'] or 0
self.total = tot
remaining = float(self.init_cash) - self.total
self.remaining_cash = remaining
#super(Overhead, self).save(update_fields=['total', 'remaining_cash'])
Overhead.objects.filter(id=self.id).update(total=tot, remaining_cash=remaining)
def __str__(self):
return 'GASTOS: {}'.format(self.date_overhead)
class ItemOverhead(models.Model):
"""Model representing a overhead."""
name = models.CharField(max_length=50, verbose_name="Nombre")
value = models.DecimalField(default=0.0, max_digits=8, decimal_places=2, verbose_name="Valor")
overhead = models.ForeignKey(Overhead, on_delete=models.CASCADE, verbose_name="Gasto")
class Meta:
ordering = ['id']
verbose_name = "gasto"
verbose_name_plural = "gastos"
def delete(self, *args, **kwargs):
overhead = self.overhead
super(ItemOverhead, self).delete(*args, **kwargs)
overhead.calculate_total()
def __str__(self):
return '{}'.format(self.name)
@receiver(post_save, sender=ItemOverhead)
def update_total_overheads_at_item(sender, instance, **kwargs):
instance.overhead.calculate_total()
@receiver(post_save, sender=Overhead)
def update_overheads_total(sender, instance, **kwargs):
instance.calculate_total()
@receiver(pre_delete, sender=ItemOverhead)
def update_total_overhead_delete_item(sender, instance, **kwargs):
instance.overhead.calculate_total()
```
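The receivers keep the cached totals consistent without any manual recalculation; a small sketch of the resulting behaviour:
```python
def _demo_overhead_totals():
    """Sketch: item saves trigger the post_save receivers above."""
    overhead = Overhead.objects.create(init_cash=1000)
    ItemOverhead.objects.create(name='Flete', value=150, overhead=overhead)
    overhead.refresh_from_db()
    assert float(overhead.total) == 150.0
    assert float(overhead.remaining_cash) == 850.0
```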
#### File: robotkmarket/registration/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
# Create your models here.
def custom_upload_to(instance, filename):
    # Drop the previous avatar, if any; a brand-new profile has no row yet.
    try:
        old_instance = Profile.objects.get(pk=instance.pk)
        old_instance.avatar.delete()
    except Profile.DoesNotExist:
        pass
    return 'profiles_images/' + filename
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
    avatar = models.ImageField(upload_to=custom_upload_to, null=True, blank=True, help_text="Enter an avatar image", verbose_name="Avatar")
bio = models.TextField(null=True, blank=True, help_text="Enter a bio", verbose_name="Bio")
link = models.URLField(max_length=210, null=True, blank=True, help_text="Enter a personal website", verbose_name="Website")
class Meta:
verbose_name = "perfil"
verbose_name_plural = "perfiles"
ordering = ['user__username', ]
"""
def get_absolute_url(self):
return ""#reverse_lazy('category-detail', args=[str(self.id)])
def soft_delete(self):
self.active = False
self.save()
def __str__(self):
return self.name
"""
@receiver(post_save, sender=User)
def ensure_profile_exists(sender, instance, **kwargs):
"""
Señal que se encarga de crear un perfil por defecto en caso de que
el usuario se cree una cuenta (post_save) pero nunca ingrese a su perfil.
"""
if kwargs.get('created', False): # Si acaba de crearse un usuario creamos el perfil
Profile.objects.get_or_create(user=instance)
# print("Se acaba de crear un usuario y su perfil enlazado.")
``` |
{
"source": "JorgeSchelotto/TrabajoFinalSeminarioPython",
"score": 2
} |
#### File: TrabajoFinalSeminarioPython/Clases/Palabras.py
```python
__author__ = '<NAME> - <NAME>'
# -*- coding: utf-8 -*-
# Copyright 2018 autors: <NAME>, <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import pygame
class Palabras(pygame.sprite.Sprite):
def __init__(self, ruta, nombre, x, y):
super().__init__()
self.__palabra = nombre
self.__click = False
self.image = pygame.image.load(ruta).convert_alpha()
self.rect = self.image.get_rect()
self.collide = False
self.posX = x
self.posY = y
def getPosX(self):
return self.posX
def getPosY(self):
return self.posY
def getPalabra(self):
return self.__palabra
def getPalabraImagen(self):
return self.image
def setClick(self, bool):
self.__click = bool
def getClick(self):
return self.__click
def getRect(self):
return self.rect
def colli(self, x, y):
if x > 20:
# Achica la imagen
center = self.rect.center
x = x - 1
y = y - 1
self.image = pygame.transform.scale(self.image, (x, y))
self.rect = self.image.get_rect()
self.rect.center = center
self.image = pygame.transform.rotozoom(self.image, -90, 0.8)
elif x <= 20:
# Para que no de x < 0
center = self.rect.center
self.image = pygame.transform.scale(self.image, (0, 0))
self.rect = self.image.get_rect()
self.rect.center = center
self.image = pygame.transform.rotozoom(self.image, -90, 0.5)
def update(self,surface):
"""Controla los eventos y coliciones de los sprites Palabras"""
if not self.getClick() and not self.collide:
self.rect.center = (self.posX, self.posY)
if self.getClick():
            # The image was clicked.
self.rect.center = pygame.mouse.get_pos()
if self.collide:
            # There is a collision.
x = self.image.get_rect().size[0]
y = self.image.get_rect().size[1]
self.colli(x,y)
        # Move the image out of the collision zone.
if self.image.get_rect().size[0] <= 20:
self.rect.center = (0,0)
surface.blit(self.getPalabraImagen(), self.getRect())
```
#### File: JorgeSchelotto/TrabajoFinalSeminarioPython/Scene.py
```python
class Scene:
"""Representa un escena abstracta del videojuego.
Una escena es una parte visible del juego, como una pantalla
de presentación o menú de opciones. Tiene que crear un objeto
derivado de esta clase para crear una escena utilizable."""
def __init__(self, director):
self.director = director
def on_update(self):
"Actualización lógica que se llama automáticamente desde el director."
raise NotImplemented("Tiene que implementar el método on_update.")
def on_event(self, event):
"Se llama cuando llega un evento especifico al bucle."
raise NotImplemented("Tiene que implementar el método on_event.")
def on_draw(self, screen):
"Se llama cuando se quiere dibujar la pantalla."
raise NotImplemented("Tiene que implementar el método on_draw.")
``` |
{
"source": "JorgeScp/performance",
"score": 2
} |
#### File: performance/evaluation/forms.py
```python
from .models import Test_Assign
from django import forms
from django.contrib.admin.widgets import AdminDateWidget
class Relation_Form(forms.ModelForm):
class Meta:
model = Test_Assign
fields = '__all__'
def __init__(self, *args, **kwargs):
super(Relation_Form, self).__init__(*args, **kwargs)
self.fields['evaluator'].empty_label = "Select"
self.fields['evaluator'].widget.attrs.update({'class': 'form-dropdown', 'id':'selectevaluated'})
self.fields['evaluated'].empty_label = "Select"
self.fields['evaluated'].widget.attrs.update({'class': 'form-dropdown', 'id':'selectevaluator'})
self.fields['relation'].widget.attrs.update({'class': 'form-control'})
self.fields['done'].widget.attrs.update({'class': 'form-control'})
```
#### File: performance/interview/forms.py
```python
from .models import Interview
from django import forms
from django.core.validators import RegexValidator
from django.conf import settings
class InterviewForm(forms.ModelForm):
strengths = forms.CharField(
label="Fortalezas",
widget=forms.TextInput(
attrs={
"class": "form-control"
}
))
oportunities_areas = forms.CharField(
label="Áreas de Oportunidad",
widget=forms.TextInput(
attrs={
"class": "form-control"
}
))
general_question = forms.CharField(
label="¿Qué le sugerirías al evaluado para mejorar su desempeño profesional y personal?",
widget=forms.TextInput(
attrs={
"class": "form-control"
}
))
observations = forms.CharField(
label="Observaciones",
widget=forms.TextInput(
attrs={
"class": "form-control"
}
))
class Meta:
model = Interview
fields = '__all__'
exclude = ('result',)
labels = {
'evaluator':'Evaluador',
'evaluated':'Evaluado',
'dated':'Fecha Evaluación',
#Labels for questions======================
'comunicacion_1':'La información que comparte es clara y asertiva.',
'comunicacion_2':'Es receptivo a las opiniones y sugerencias de los demás.',
'comunicacion_3':'Escucha y presta atención a las conversaciones.',
'comunicacion_4':'Su comunicación escrita es clara, concisa y efectiva.',
'comunicacion_5':'Expresa sus ideas con claridad y respeto a los demás.',
'trabajo_equipo_1':'Se desempeña como un miembro activo del equipo.',
'trabajo_equipo_2':'Motiva y guía al equipo para el logro de los objetivos establecidos.',
'trabajo_equipo_3':'Fomenta el diálogo de manera abierta y directa. ',
'servicio_cliente_1':'Es cortes y amable en el trato que tiene con el cliente',
'servicio_cliente_2':'Se gestionan correctamente las solicitudes para dar respuesta de manera oportuna.',
'servicio_cliente_3':'Realiza de manera oportuna, feedback a los procesos',
'resolucion_problemas_1':'Busca la información suficiente para la toma de decisiones.',
'resolucion_problemas_2':'Es flexible al cambio en las diferentes situaciones que se presentan. ',
'resolucion_problemas_3':'Considera las implicaciones antes de llevar a cabo una acción. ',
'resolucion_problemas_4':'Conserva la calma en situaciones complicadas. ',
'resolucion_problemas_5':'Es propositivo en las diferentes situaciones que requieran soluciones o toma de decisiones de impacto. ',
'liderazgo_1':'Se adapta a trabajar con nuevos procesos y tareas. ',
'liderazgo_2':'No muestra resistencia a las ideas de las demás personas.',
'liderazgo_3':'Comparte su conocimiento, habilidades y experiencia.',
'liderazgo_4':'Comparte el reconocimiento de logros con el resto del equipo.',
'liderazgo_5':'Busca reforzar sus habilidades y trabajar en sus áreas de oportunidad',
'admon_tiempo_1':'Establece prioridades en sus actividades laborales cumpliento con las metas y objetivos establecidos.',
'admon_tiempo_2':'Cumple con los tiempo y forma los proyectos asignados',
'admon_tiempo_3':'Utiliza eficientemente los recursos asignados para llevar a cabo sus actividades. ',
'pensamiento_estrategico_1':'Comprende las implicaciones de sus decisiones en el negocio a corto y largo plazo.',
'pensamiento_estrategico_2':'Determina objetivos y establece prioridades para lograrlos.',
'pensamiento_estrategico_3':'Tiene visión a largo plazo y busca oportunidades paracumplir con las metas de organización desde su área.',
'pensamiento_estrategico_4':'Es percibido por el cliente como una persona confiable que representa a la empresa. ',
'enfoque_resultados_1':'Reconoce y aprovecha las oportunidades que se presentan en las diferentes situaciones.',
'enfoque_resultados_2':'Mantiene altos niveles de estándares de desempeño ',
'enfoque_resultados_3':'Sus resultados son sobresalientes y generan valor a la empresa. ',
'enfoque_resultados_4':'Es detallista con la información que presenta y en pocas ocasiones se detectan inconsistencias o errores. ',
#__________________________________________
}
def __init__(self, *args, **kwargs):
super(InterviewForm, self).__init__(*args, **kwargs)
#self.fields['days'].validators = [RegexValidator(r'^[\d]{1,3}$', message="Value must be an integer and less than 3 numbers long")]
# self.fields['leaveconcept'].empty_label = "Select"
# self.fields['leaveconcept'].widget.attrs.update({'class': 'form-dropdown'})
self.fields['dated'].widget.attrs.update({'class': 'form-control','id':'datepicker'})
self.fields['dated'].input_formats=(settings.DATE_INPUT_FORMATS)
self.fields['evaluator'].empty_label = "Select"
self.fields['evaluator'].widget.attrs.update({'class': 'form-dropdown', 'id':'selectemp'})
self.fields['evaluated'].empty_label = "Select"
self.fields['evaluated'].widget.attrs.update({'class': 'form-dropdown', 'id':'selecte2'})
#Make form control all questions
self.fields['comunicacion_1'].widget.attrs.update({'class': 'form-control'})
self.fields['comunicacion_2'].widget.attrs.update({'class': 'form-control'})
self.fields['comunicacion_3'].widget.attrs.update({'class': 'form-control'})
self.fields['comunicacion_4'].widget.attrs.update({'class': 'form-control'})
self.fields['comunicacion_5'].widget.attrs.update({'class': 'form-control'})
self.fields['trabajo_equipo_1'].widget.attrs.update({'class': 'form-control'})
self.fields['trabajo_equipo_2'].widget.attrs.update({'class': 'form-control'})
self.fields['trabajo_equipo_3'].widget.attrs.update({'class': 'form-control'})
self.fields['servicio_cliente_1'].widget.attrs.update({'class': 'form-control'})
self.fields['servicio_cliente_2'].widget.attrs.update({'class': 'form-control'})
self.fields['servicio_cliente_3'].widget.attrs.update({'class': 'form-control'})
self.fields['resolucion_problemas_1'].widget.attrs.update({'class': 'form-control'})
self.fields['resolucion_problemas_2'].widget.attrs.update({'class': 'form-control'})
self.fields['resolucion_problemas_3'].widget.attrs.update({'class': 'form-control'})
self.fields['resolucion_problemas_4'].widget.attrs.update({'class': 'form-control'})
self.fields['resolucion_problemas_5'].widget.attrs.update({'class': 'form-control'})
self.fields['liderazgo_1'].widget.attrs.update({'class': 'form-control'})
self.fields['liderazgo_2'].widget.attrs.update({'class': 'form-control'})
self.fields['liderazgo_3'].widget.attrs.update({'class': 'form-control'})
self.fields['liderazgo_4'].widget.attrs.update({'class': 'form-control'})
self.fields['liderazgo_5'].widget.attrs.update({'class': 'form-control'})
self.fields['admon_tiempo_1'].widget.attrs.update({'class': 'form-control'})
self.fields['admon_tiempo_2'].widget.attrs.update({'class': 'form-control'})
self.fields['admon_tiempo_3'].widget.attrs.update({'class': 'form-control'})
self.fields['pensamiento_estrategico_1'].widget.attrs.update({'class': 'form-control'})
self.fields['pensamiento_estrategico_2'].widget.attrs.update({'class': 'form-control'})
self.fields['pensamiento_estrategico_3'].widget.attrs.update({'class': 'form-control'})
self.fields['pensamiento_estrategico_4'].widget.attrs.update({'class': 'form-control'})
self.fields['enfoque_resultados_1'].widget.attrs.update({'class': 'form-control'})
self.fields['enfoque_resultados_2'].widget.attrs.update({'class': 'form-control'})
self.fields['enfoque_resultados_3'].widget.attrs.update({'class': 'form-control'})
self.fields['enfoque_resultados_4'].widget.attrs.update({'class': 'form-control'})
#elf.fields['touser'].disabled = True
``` |
{
"source": "JorgeSerranoP/Practica_Busqueda_HO",
"score": 4
} |
#### File: Practica_Busqueda_HO/Ejemplos/sum-words.py
```python
import constraint
# this simple Python script solves the following sum-word brain
# teaser:
#
# S E N D
# + M O R E
# -----------
# M O N E Y
# a b c d
# sumWordConstraint
#
# verifies that a + b = c but taking into account an overflow coming
# from the previous summation and that an overflow might be generated
# in this one
def sumWordConstraint (a, b, c, post, pre=0):
return pre+a+b == c + 10*post
# main
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# create a new problem
problem = constraint.Problem ()
# variables
# -------------------------------------------------------------------------
# upper-case letters stand for the variables to unveal wheras
# lower-case letters stand for the overflow
problem.addVariables ("SENDMORY",range (10))
problem.addVariables ("abcd", range (2))
# constraints
# -------------------------------------------------------------------------
# the constraints are numerically described as follows
problem.addConstraint (sumWordConstraint, ('D', 'E', 'Y', 'd'))
problem.addConstraint (sumWordConstraint, ('N', 'R', 'E', 'c', 'd'))
problem.addConstraint (sumWordConstraint, ('E', 'O', 'N', 'b', 'c'))
problem.addConstraint (sumWordConstraint, ('S', 'M', 'O', 'a', 'b'))
problem.addConstraint (constraint.AllDifferentConstraint (), "SENDMORY")
problem.addConstraint (lambda x, y: x==y, ("M", "a"))
# compute the solutions
solutions = problem.getSolutions ()
print (" #{0} solutions have been found: ".format (len (solutions)))
for isolution in solutions:
print ("""
{0} {1} {2} {3}
+ {4} {5} {6} {1}
----------
{4} {5} {2} {1} {7}""".format (isolution['S'], isolution['E'], isolution['N'], isolution['D'],
isolution['M'], isolution['O'], isolution['R'], isolution['Y']))
# Local Variables:
# mode:python
# fill-column:80
# End:
``` |
{
"source": "JorgeSerranoP/Practica_GridWorld_AA",
"score": 3
} |
#### File: Practica_GridWorld_AA/Reinforcement_tutorial/bustersAgents.py
```python
from __future__ import print_function
# bustersAgents.py
# ----------------
from builtins import str
from builtins import range
from builtins import object
import util
from game import Agent
from game import Directions
from keyboardAgents import KeyboardAgent
import inference
import busters
class NullGraphics(object):
"Placeholder for graphics"
def initialize(self, state, isBlue = False):
pass
def update(self, state):
pass
def pause(self):
pass
def draw(self, state):
pass
def updateDistributions(self, dist):
pass
def finish(self):
pass
class KeyboardInference(inference.InferenceModule):
"""
Basic inference module for use with the keyboard.
"""
def initializeUniformly(self, gameState):
"Begin with a uniform distribution over ghost positions."
self.beliefs = util.Counter()
for p in self.legalPositions: self.beliefs[p] = 1.0
self.beliefs.normalize()
def observe(self, observation, gameState):
noisyDistance = observation
emissionModel = busters.getObservationDistribution(noisyDistance)
pacmanPosition = gameState.getPacmanPosition()
allPossible = util.Counter()
for p in self.legalPositions:
trueDistance = util.manhattanDistance(p, pacmanPosition)
if emissionModel[trueDistance] > 0:
allPossible[p] = 1.0
allPossible.normalize()
self.beliefs = allPossible
def elapseTime(self, gameState):
pass
def getBeliefDistribution(self):
return self.beliefs
class BustersAgent(object):
"An agent that tracks and displays its beliefs about ghost positions."
def __init__( self, index = 0, inference = "ExactInference", ghostAgents = None, observeEnable = True, elapseTimeEnable = True):
inferenceType = util.lookup(inference, globals())
self.inferenceModules = [inferenceType(a) for a in ghostAgents]
self.observeEnable = observeEnable
self.elapseTimeEnable = elapseTimeEnable
def registerInitialState(self, gameState):
"Initializes beliefs and inference modules"
import __main__
self.display = __main__._display
for inference in self.inferenceModules:
inference.initialize(gameState)
self.ghostBeliefs = [inf.getBeliefDistribution() for inf in self.inferenceModules]
self.firstMove = True
def observationFunction(self, gameState):
"Removes the ghost states from the gameState"
agents = gameState.data.agentStates
gameState.data.agentStates = [agents[0]] + [None for i in range(1, len(agents))]
return gameState
def getAction(self, gameState):
"Updates beliefs, then chooses an action based on updated beliefs."
for index, inf in enumerate(self.inferenceModules):
if not self.firstMove and self.elapseTimeEnable:
inf.elapseTime(gameState)
self.firstMove = False
if self.observeEnable:
inf.observeState(gameState)
self.ghostBeliefs[index] = inf.getBeliefDistribution()
self.display.updateDistributions(self.ghostBeliefs)
return self.chooseAction(gameState)
def chooseAction(self, gameState):
"By default, a BustersAgent just stops. This should be overridden."
return Directions.STOP
class BustersKeyboardAgent(BustersAgent, KeyboardAgent):
"An agent controlled by the keyboard that displays beliefs about ghost positions."
def __init__(self, index = 0, inference = "KeyboardInference", ghostAgents = None):
KeyboardAgent.__init__(self, index)
BustersAgent.__init__(self, index, inference, ghostAgents)
def getAction(self, gameState):
return BustersAgent.getAction(self, gameState)
def chooseAction(self, gameState):
return KeyboardAgent.getAction(self, gameState)
from distanceCalculator import Distancer
from game import Actions
from game import Directions
import random, sys
'''Random PacMan Agent'''
class RandomPAgent(BustersAgent):
def registerInitialState(self, gameState):
BustersAgent.registerInitialState(self, gameState)
self.distancer = Distancer(gameState.data.layout, False)
''' Example of counting something'''
def countFood(self, gameState):
food = 0
        for column in gameState.data.food:
            for cell in column:
                if cell:
                    food = food + 1
return food
''' Print the layout'''
def printGrid(self, gameState):
table = ""
##print(gameState.data.layout) ## Print by terminal
for x in range(gameState.data.layout.width):
for y in range(gameState.data.layout.height):
food, walls = gameState.data.food, gameState.data.layout.walls
table = table + gameState.data._foodWallStr(food[x][y], walls[x][y]) + ","
table = table[:-1]
return table
def printLineData(self,gameState):
'''Observations of the state
print(str(gameState.livingGhosts))
print(gameState.data.agentStates[0])
print(gameState.getNumFood())
print (gameState.getCapsules())
width, height = gameState.data.layout.width, gameState.data.layout.height
print(width, height)
print(gameState.data.ghostDistances)
print(gameState.data.layout)'''
'''END Observations of the state'''
print(gameState)
weka_line = ""
for i in gameState.livingGhosts:
weka_line = weka_line + str(i) + ","
weka_line = weka_line + str(gameState.getNumFood()) + ","
for i in gameState.getCapsules():
weka_line = weka_line + str(i[0]) + "," + str(i[1]) + ","
for i in gameState.data.ghostDistances:
weka_line = weka_line + str(i) + ","
weka_line = weka_line + str(gameState.data.score) + "," +\
str(len(gameState.data.capsules)) + "," + str(self.countFood(gameState)) +\
"," + str(gameState.data.agentStates[0].configuration.pos[0]) + "," +\
            str(gameState.data.agentStates[0].configuration.pos[1]) +\
"," + str(gameState.data.agentStates[0].scaredTimer) + "," +\
self.printGrid(gameState) + "," +\
str(gameState.data.agentStates[0].numReturned) + "," +\
str(gameState.data.agentStates[0].getPosition()[0]) + "," +\
str(gameState.data.agentStates[0].getPosition()[1])+ "," +\
str(gameState.data.agentStates[0].numCarrying)+ "," +\
str(gameState.data.agentStates[0].getDirection())
print(weka_line)
def chooseAction(self, gameState):
move = Directions.STOP
        legal = gameState.getLegalActions(0)  # Legal actions available to Pacman
move_random = random.randint(0, 3)
self.printLineData(gameState)
if ( move_random == 0 ) and Directions.WEST in legal: move = Directions.WEST
if ( move_random == 1 ) and Directions.EAST in legal: move = Directions.EAST
if ( move_random == 2 ) and Directions.NORTH in legal: move = Directions.NORTH
if ( move_random == 3 ) and Directions.SOUTH in legal: move = Directions.SOUTH
return move
class GreedyBustersAgent(BustersAgent):
"An agent that charges the closest ghost."
def registerInitialState(self, gameState):
"Pre-computes the distance between every two points."
BustersAgent.registerInitialState(self, gameState)
self.distancer = Distancer(gameState.data.layout, False)
def chooseAction(self, gameState):
"""
First computes the most likely position of each ghost that has
not yet been captured, then chooses an action that brings
Pacman closer to the closest ghost (according to mazeDistance!).
To find the mazeDistance between any two positions, use:
self.distancer.getDistance(pos1, pos2)
To find the successor position of a position after an action:
successorPosition = Actions.getSuccessor(position, action)
livingGhostPositionDistributions, defined below, is a list of
util.Counter objects equal to the position belief
distributions for each of the ghosts that are still alive. It
is defined based on (these are implementation details about
which you need not be concerned):
1) gameState.getLivingGhosts(), a list of booleans, one for each
agent, indicating whether or not the agent is alive. Note
that pacman is always agent 0, so the ghosts are agents 1,
onwards (just as before).
2) self.ghostBeliefs, the list of belief distributions for each
of the ghosts (including ghosts that are not alive). The
indices into this list should be 1 less than indices into the
gameState.getLivingGhosts() list.
"""
pacmanPosition = gameState.getPacmanPosition()
legal = [a for a in gameState.getLegalPacmanActions()]
livingGhosts = gameState.getLivingGhosts()
livingGhostPositionDistributions = \
[beliefs for i, beliefs in enumerate(self.ghostBeliefs)
if livingGhosts[i+1]]
"*** YOUR CODE HERE ***"
return Directions.EAST
``` |
{
"source": "JorgeSerranoP/Practica_PacMan-Weka_AA",
"score": 3
} |
#### File: JorgeSerranoP/Practica_PacMan-Weka_AA/inference.py
```python
from builtins import object
import itertools
import util
import random
import busters
import game
class InferenceModule(object):
"""
An inference module tracks a belief distribution over a ghost's location.
This is an abstract class, which you should not modify.
"""
############################################
# Useful methods for all inference modules #
############################################
def __init__(self, ghostAgent):
"Sets the ghost agent for later access"
self.ghostAgent = ghostAgent
self.index = ghostAgent.index
self.obs = [] # most recent observation position
def getJailPosition(self):
return (2 * self.ghostAgent.index - 1, 1)
def getPositionDistribution(self, gameState):
"""
Returns a distribution over successor positions of the ghost from the
given gameState.
You must first place the ghost in the gameState, using setGhostPosition
below.
"""
ghostPosition = gameState.getGhostPosition(self.index) # The position you set
actionDist = self.ghostAgent.getDistribution(gameState)
dist = util.Counter()
for action, prob in list(actionDist.items()):
successorPosition = game.Actions.getSuccessor(ghostPosition, action)
dist[successorPosition] = prob
return dist
def setGhostPosition(self, gameState, ghostPosition):
"""
Sets the position of the ghost for this inference module to the
specified position in the supplied gameState.
Note that calling setGhostPosition does not change the position of the
ghost in the GameState object used for tracking the true progression of
the game. The code in inference.py only ever receives a deep copy of
the GameState object which is responsible for maintaining game state,
not a reference to the original object. Note also that the ghost
distance observations are stored at the time the GameState object is
created, so changing the position of the ghost will not affect the
functioning of observeState.
"""
conf = game.Configuration(ghostPosition, game.Directions.STOP)
gameState.data.agentStates[self.index] = game.AgentState(conf, False)
return gameState
def observeState(self, gameState):
"Collects the relevant noisy distance observation and pass it along."
distances = gameState.getNoisyGhostDistances()
if len(distances) >= self.index: # Check for missing observations
obs = distances[self.index - 1]
self.obs = obs
self.observe(obs, gameState)
def initialize(self, gameState):
"Initializes beliefs to a uniform distribution over all positions."
# The legal positions do not include the ghost prison cells in the bottom left.
self.legalPositions = [p for p in gameState.getWalls().asList(False) if p[1] > 1]
self.initializeUniformly(gameState)
######################################
# Methods that need to be overridden #
######################################
def initializeUniformly(self, gameState):
"Sets the belief state to a uniform prior belief over all positions."
pass
def observe(self, observation, gameState):
"Updates beliefs based on the given distance observation and gameState."
pass
def elapseTime(self, gameState):
"Updates beliefs for a time step elapsing from a gameState."
pass
def getBeliefDistribution(self):
"""
Returns the agent's current belief state, a distribution over ghost
locations conditioned on all evidence so far.
"""
pass
class ExactInference(InferenceModule):
"""
The exact dynamic inference module should use forward-algorithm updates to
compute the exact belief function at each time step.
"""
def initializeUniformly(self, gameState):
"Begin with a uniform distribution over ghost positions."
self.beliefs = util.Counter()
for p in self.legalPositions: self.beliefs[p] = 1.0
self.beliefs.normalize()
def observe(self, observation, gameState):
"""
Updates beliefs based on the distance observation and Pacman's position.
The noisyDistance is the estimated Manhattan distance to the ghost you
are tracking.
The emissionModel below stores the probability of the noisyDistance for
any true distance you supply. That is, it stores P(noisyDistance |
TrueDistance).
self.legalPositions is a list of the possible ghost positions (you
should only consider positions that are in self.legalPositions).
A correct implementation will handle the following special case:
* When a ghost is captured by Pacman, all beliefs should be updated
so that the ghost appears in its prison cell, position
self.getJailPosition()
You can check if a ghost has been captured by Pacman by
checking if it has a noisyDistance of None (a noisy distance
of None will be returned if, and only if, the ghost is
captured).
"""
noisyDistance = observation
emissionModel = busters.getObservationDistribution(noisyDistance)
pacmanPosition = gameState.getPacmanPosition()
"*** YOUR CODE HERE ***"
##AA
'''print("Estamos Aqui")
print(pacmanPosition)
print(noisyDistance)
print(emissionModel)
print("Estamos Aqui")
##move = Directions.WEST
#AA'''
##util.raiseNotDefined()
# Replace this code with a correct observation update
# Be sure to handle the "jail" edge case where the ghost is eaten
# and noisyDistance is None
allPossible = util.Counter()
for p in self.legalPositions:
trueDistance = util.manhattanDistance(p, pacmanPosition)
if emissionModel[trueDistance] > 0:
allPossible[p] = 1.0
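        # --- Illustrative sketch, not the graded solution ---
        # A full Bayesian update would weight each position by both the
        # emission model and the prior belief, and jail captured ghosts:
        # if noisyDistance is None:
        #     allPossible = util.Counter()
        #     allPossible[self.getJailPosition()] = 1.0
        # else:
        #     for p in self.legalPositions:
        #         trueDistance = util.manhattanDistance(p, pacmanPosition)
        #         allPossible[p] = emissionModel[trueDistance] * self.beliefs[p]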
"*** END YOUR CODE HERE ***"
allPossible.normalize()
self.beliefs = allPossible
def elapseTime(self, gameState):
"""
Update self.beliefs in response to a time step passing from the current
state.
The transition model is not entirely stationary: it may depend on
Pacman's current position (e.g., for DirectionalGhost). However, this
is not a problem, as Pacman's current position is known.
In order to obtain the distribution over new positions for the ghost,
given its previous position (oldPos) as well as Pacman's current
position, use this line of code:
newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos))
Note that you may need to replace "oldPos" with the correct name of the
variable that you have used to refer to the previous ghost position for
which you are computing this distribution. You will need to compute
multiple position distributions for a single update.
newPosDist is a util.Counter object, where for each position p in
self.legalPositions,
        newPosDist[p] = Pr( ghost is at position p at time t + 1 | ghost is at position oldPos at time t )
(and also given Pacman's current position). You may also find it useful
to loop over key, value pairs in newPosDist, like:
for newPos, prob in newPosDist.items():
...
*** GORY DETAIL AHEAD ***
As an implementation detail (with which you need not concern yourself),
the line of code at the top of this comment block for obtaining
newPosDist makes use of two helper methods provided in InferenceModule
above:
1) self.setGhostPosition(gameState, ghostPosition)
This method alters the gameState by placing the ghost we're
tracking in a particular position. This altered gameState can be
used to query what the ghost would do in this position.
2) self.getPositionDistribution(gameState)
This method uses the ghost agent to determine what positions the
ghost will move to from the provided gameState. The ghost must be
placed in the gameState with a call to self.setGhostPosition
above.
It is worthwhile, however, to understand why these two helper methods
are used and how they combine to give us a belief distribution over new
positions after a time update from a particular position.
"""
"*** YOUR CODE HERE ***"
##util.raiseNotDefined()
def getBeliefDistribution(self):
return self.beliefs
class ParticleFilter(InferenceModule):
"""
A particle filter for approximately tracking a single ghost.
Useful helper functions will include random.choice, which chooses an element
from a list uniformly at random, and util.sample, which samples a key from a
Counter by treating its values as probabilities.
"""
def __init__(self, ghostAgent, numParticles=300):
        InferenceModule.__init__(self, ghostAgent)
self.setNumParticles(numParticles)
def setNumParticles(self, numParticles):
self.numParticles = numParticles
def initializeUniformly(self, gameState):
"""
Initializes a list of particles. Use self.numParticles for the number of
particles. Use self.legalPositions for the legal board positions where a
particle could be located. Particles should be evenly (not randomly)
distributed across positions in order to ensure a uniform prior.
Note: the variable you store your particles in must be a list; a list is
simply a collection of unweighted variables (positions in this case).
Storing your particles as a Counter (where there could be an associated
weight with each position) is incorrect and may produce errors.
"""
"*** YOUR CODE HERE ***"
def observe(self, observation, gameState):
"""
Update beliefs based on the given distance observation. Make sure to
handle the special case where all particles have weight 0 after
reweighting based on observation. If this happens, resample particles
uniformly at random from the set of legal positions
(self.legalPositions).
A correct implementation will handle two special cases:
1) When a ghost is captured by Pacman, all particles should be updated
so that the ghost appears in its prison cell,
self.getJailPosition()
As before, you can check if a ghost has been captured by Pacman by
checking if it has a noisyDistance of None.
2) When all particles receive 0 weight, they should be recreated from
the prior distribution by calling initializeUniformly. The total
weight for a belief distribution can be found by calling totalCount
on a Counter object
util.sample(Counter object) is a helper method to generate a sample from
a belief distribution.
You may also want to use util.manhattanDistance to calculate the
distance between a particle and Pacman's position.
"""
noisyDistance = observation
emissionModel = busters.getObservationDistribution(noisyDistance)
pacmanPosition = gameState.getPacmanPosition()
"*** YOUR CODE HERE ***"
##util.raiseNotDefined()
def elapseTime(self, gameState):
"""
Update beliefs for a time step elapsing.
As in the elapseTime method of ExactInference, you should use:
newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos))
to obtain the distribution over new positions for the ghost, given its
previous position (oldPos) as well as Pacman's current position.
util.sample(Counter object) is a helper method to generate a sample from
a belief distribution.
"""
"*** YOUR CODE HERE ***"
##util.raiseNotDefined()
def getBeliefDistribution(self):
"""
Return the agent's current belief state, a distribution over ghost
locations conditioned on all evidence and time passage. This method
essentially converts a list of particles into a belief distribution (a
Counter object)
"""
"*** YOUR CODE HERE ***"
## util.raiseNotDefined()
class MarginalInference(InferenceModule):
"""
A wrapper around the JointInference module that returns marginal beliefs
about ghosts.
"""
def initializeUniformly(self, gameState):
"Set the belief state to an initial, prior value."
if self.index == 1:
jointInference.initialize(gameState, self.legalPositions)
jointInference.addGhostAgent(self.ghostAgent)
def observeState(self, gameState):
"Update beliefs based on the given distance observation and gameState."
if self.index == 1:
jointInference.observeState(gameState)
def elapseTime(self, gameState):
"Update beliefs for a time step elapsing from a gameState."
if self.index == 1:
jointInference.elapseTime(gameState)
def getBeliefDistribution(self):
"Returns the marginal belief over a particular ghost by summing out the others."
jointDistribution = jointInference.getBeliefDistribution()
dist = util.Counter()
for t, prob in list(jointDistribution.items()):
dist[t[self.index - 1]] += prob
return dist
class JointParticleFilter(object):
"""
JointParticleFilter tracks a joint distribution over tuples of all ghost
positions.
"""
def __init__(self, numParticles=600):
self.setNumParticles(numParticles)
def setNumParticles(self, numParticles):
self.numParticles = numParticles
def initialize(self, gameState, legalPositions):
"Stores information about the game, then initializes particles."
self.numGhosts = gameState.getNumAgents() - 1
self.ghostAgents = []
self.legalPositions = legalPositions
self.initializeParticles()
def initializeParticles(self):
"""
Initialize particles to be consistent with a uniform prior.
Each particle is a tuple of ghost positions. Use self.numParticles for
the number of particles. You may find the `itertools` package helpful.
Specifically, you will need to think about permutations of legal ghost
positions, with the additional understanding that ghosts may occupy the
same space. Look at the `itertools.product` function to get an
implementation of the Cartesian product.
Note: If you use itertools, keep in mind that permutations are not
returned in a random order; you must shuffle the list of permutations in
order to ensure even placement of particles across the board. Use
self.legalPositions to obtain a list of positions a ghost may occupy.
Note: the variable you store your particles in must be a list; a list is
simply a collection of unweighted variables (positions in this case).
Storing your particles as a Counter (where there could be an associated
weight with each position) is incorrect and may produce errors.
"""
"*** YOUR CODE HERE ***"
def addGhostAgent(self, agent):
"""
Each ghost agent is registered separately and stored (in case they are
different).
"""
self.ghostAgents.append(agent)
def getJailPosition(self, i):
        return (2 * i + 1, 1)
def observeState(self, gameState):
"""
Resamples the set of particles using the likelihood of the noisy
observations.
To loop over the ghosts, use:
for i in range(self.numGhosts):
...
A correct implementation will handle two special cases:
1) When a ghost is captured by Pacman, all particles should be updated
so that the ghost appears in its prison cell, position
self.getJailPosition(i) where `i` is the index of the ghost.
As before, you can check if a ghost has been captured by Pacman by
checking if it has a noisyDistance of None.
2) When all particles receive 0 weight, they should be recreated from
the prior distribution by calling initializeParticles. After all
particles are generated randomly, any ghosts that are eaten (have
noisyDistance of None) must be changed to the jail Position. This
will involve changing each particle if a ghost has been eaten.
self.getParticleWithGhostInJail is a helper method to edit a specific
particle. Since we store particles as tuples, they must be converted to
a list, edited, and then converted back to a tuple. This is a common
operation when placing a ghost in jail.
"""
pacmanPosition = gameState.getPacmanPosition()
noisyDistances = gameState.getNoisyGhostDistances()
if len(noisyDistances) < self.numGhosts:
return
emissionModels = [busters.getObservationDistribution(dist) for dist in noisyDistances]
"*** YOUR CODE HERE ***"
def getParticleWithGhostInJail(self, particle, ghostIndex):
"""
Takes a particle (as a tuple of ghost positions) and returns a particle
with the ghostIndex'th ghost in jail.
"""
particle = list(particle)
particle[ghostIndex] = self.getJailPosition(ghostIndex)
return tuple(particle)
def elapseTime(self, gameState):
"""
Samples each particle's next state based on its current state and the
gameState.
To loop over the ghosts, use:
for i in range(self.numGhosts):
...
Then, assuming that `i` refers to the index of the ghost, to obtain the
distributions over new positions for that single ghost, given the list
(prevGhostPositions) of previous positions of ALL of the ghosts, use
this line of code:
newPosDist = getPositionDistributionForGhost(
setGhostPositions(gameState, prevGhostPositions), i, self.ghostAgents[i]
)
Note that you may need to replace `prevGhostPositions` with the correct
name of the variable that you have used to refer to the list of the
previous positions of all of the ghosts, and you may need to replace `i`
with the variable you have used to refer to the index of the ghost for
which you are computing the new position distribution.
As an implementation detail (with which you need not concern yourself),
the line of code above for obtaining newPosDist makes use of two helper
functions defined below in this file:
1) setGhostPositions(gameState, ghostPositions)
This method alters the gameState by placing the ghosts in the
supplied positions.
2) getPositionDistributionForGhost(gameState, ghostIndex, agent)
This method uses the supplied ghost agent to determine what
positions a ghost (ghostIndex) controlled by a particular agent
(ghostAgent) will move to in the supplied gameState. All ghosts
must first be placed in the gameState using setGhostPositions
above.
The ghost agent you are meant to supply is
self.ghostAgents[ghostIndex-1], but in this project all ghost
agents are always the same.
"""
newParticles = []
for oldParticle in self.particles:
newParticle = list(oldParticle) # A list of ghost positions
# now loop through and update each entry in newParticle...
"*** YOUR CODE HERE ***"
"*** END YOUR CODE HERE ***"
newParticles.append(tuple(newParticle))
self.particles = newParticles
def getBeliefDistribution(self):
"*** YOUR CODE HERE ***"
util.raiseNotDefined()
# One JointInference module is shared globally across instances of MarginalInference
jointInference = JointParticleFilter()
def getPositionDistributionForGhost(gameState, ghostIndex, agent):
"""
Returns the distribution over positions for a ghost, using the supplied
gameState.
"""
# index 0 is pacman, but the students think that index 0 is the first ghost.
ghostPosition = gameState.getGhostPosition(ghostIndex+1)
actionDist = agent.getDistribution(gameState)
dist = util.Counter()
for action, prob in list(actionDist.items()):
successorPosition = game.Actions.getSuccessor(ghostPosition, action)
dist[successorPosition] = prob
return dist
def setGhostPositions(gameState, ghostPositions):
"Sets the position of all ghosts to the values in ghostPositionTuple."
for index, pos in enumerate(ghostPositions):
conf = game.Configuration(pos, game.Directions.STOP)
gameState.data.agentStates[index + 1] = game.AgentState(conf, False)
return gameState
``` |
{
"source": "JorgeSidgo/yt-bulk",
"score": 3
} |
#### File: yt-bulk/logic/convert.py
```python
import os
import sys
import json
from moviepy.editor import VideoFileClip
def convert():
SAVE_PATH = "C:/Users/jsidg/Desktop/VIEJO"
for count, filename in enumerate(os.listdir(SAVE_PATH + "/VIDEO")):
video = VideoFileClip(SAVE_PATH + "/VIDEO/" + filename)
video.audio.write_audiofile(
SAVE_PATH + "/MUSICA/" + filename[:-4] + '.mp3', bitrate='256k', logger=None)
video.close()
print(jsonify('-', filename[:-4], 'Completo'))
def jsonify(link, title, status):
resp = {
"link": link,
"title": title,
"status": status
}
return json.dumps(resp)
convert()
sys.stdout.flush()
``` |
{
"source": "jorgesilva15/Dork",
"score": 2
} |
#### File: Dork/tests/conftest.py
```python
import pytest
import dork.types as types
pytest_plugins = ["pytester"] # pylint: disable=invalid-name
@pytest.fixture
def player():
"""A basic dork player fixture
"""
return types.Player(types.GAME)
@pytest.fixture
def room():
"""A basic dork room fixture
"""
return types.Room(types.GAME)
def noop():
"""noop replacement
"""
def noop_kwargs(_, **__): # noqa: E722
"""noop with kwargs
"""
def noop_args(*_):
"""noop multiple arguments
"""
@pytest.fixture(autouse=True)
def maptype(monkeypatch):
"""fixture that prevents Map from drawing a plot
"""
import pylab
import networkx
monkeypatch.setattr(pylab, 'show', noop)
monkeypatch.setattr(networkx, "draw", noop_kwargs)
monkeypatch.setattr(types.Map, "_setup_window", noop_args)
@pytest.fixture
def run(mocker, capsys):
"""CLI run method fixture
"""
def do_run(main, *args, **kwargs):
mocked_input = mocker.patch('builtins.input')
mocked_input.side_effect = kwargs.get('input_values', ['quit'] * 100)
main(*args)
cap = capsys.readouterr()
return cap.out, cap.err
return do_run
```
#### File: Dork/tests/test_dork_saveload.py
```python
from types import FunctionType
import yaml
import dork.saveload
from dork import types
def testsave(run):
"""Save data should actually work no matter what
type of data is used.
"""
assert isinstance(dork.saveload.save, FunctionType)
try:
with open('./dork/yaml/default.yml') as file:
# Should not call load directly
data = yaml.safe_load(file.read())
game = types.Game(data)
run(dork.saveload.save, game, input_values=['roomdatatest'])
run(dork.saveload.save, game, input_values=['default', 'roomdatatest'])
run(dork.saveload.save, game, input_values=['\0', 'roomdatatest'])
except: # noqa: E722
raise AssertionError("cannot run 'dork' command")
def testload(run):
"""load should grab the data and parse it without further input
"""
assert isinstance(dork.saveload.load, FunctionType)
try:
run(dork.saveload.load, input_values=['default'])
run(dork.saveload.load, input_values=['\0', 'default'])
except: # noqa: E722
raise AssertionError("cannot run 'dork' command")
def teststate(run):
"""Ensures the game state is loaded properly
"""
assert isinstance(dork.saveload.game_state, FunctionType)
try:
run(dork.saveload.game_state)
except: # noqa: E722
raise AssertionError("cannot run 'dork' command")
``` |
{
"source": "jorgessanchez7/Global_Forecast_Validation",
"score": 3
} |
#### File: jorgessanchez7/Global_Forecast_Validation/validate_forecasts.py
```python
import xarray as xr
import pandas as pd
import numpy as np
import os
import numba as nb
import time
import dask.array as da
def compute_all(work_dir, memory_to_allocate_gb, date_strings):
    array_size_bytes = 3060  # 15 forecast days x 51 ensemble members x 4 bytes (float32)
memory_to_allocate_bytes = memory_to_allocate_gb * 1e9
files = [os.path.join(work_dir, i + ".nc") for i in date_strings]
chunk_size = int(np.floor(memory_to_allocate_bytes / ((array_size_bytes * len(files)) + len(files))))
print("Chunk Size:", chunk_size)
list_of_dask_q_arrays = []
list_of_dask_init_arrays = []
# Creating a large dask array with all of the data in it
start = time.time()
for file in files:
ds = xr.open_dataset(file, chunks={"rivid": chunk_size})
tmp_dask_q_array = ds["Qout"].data
list_of_dask_q_arrays.append(tmp_dask_q_array)
tmp_dask_init_array = ds["initialization_values"].data
list_of_dask_init_arrays.append(tmp_dask_init_array)
ds.close()
end = time.time()
big_dask_q_array = da.stack(list_of_dask_q_arrays)
big_dask_init_array = da.stack(list_of_dask_init_arrays)
print(big_dask_q_array.shape)
print(big_dask_init_array.shape)
print("Time to create dask arrays: ", end - start)
# Retrieving the number of streams and their corresponding Rivids
tmp_dataset = xr.open_dataset(files[0])
num_of_streams = tmp_dataset['rivid'].size
rivids = tmp_dataset['rivid'].data
tmp_dataset.close()
num_chunk_iterations = int(np.ceil(num_of_streams / chunk_size))
start_chunk = 0
end_chunk = chunk_size
list_of_tuples_with_metrics = []
for chunk_number in range(num_chunk_iterations):
start = time.time()
big_forecast_data_array = np.asarray(big_dask_q_array[:, start_chunk:end_chunk, :, :])
big_init_data_array = np.asarray(big_dask_init_array[:, start_chunk:end_chunk])
end = time.time()
print("Time to read from disk:", end - start)
rivids_chunk = rivids[start_chunk:end_chunk]
start = time.time()
results_array = numba_calculate_metrics(
big_forecast_data_array, big_init_data_array, len(files), big_forecast_data_array.shape[1], 15
)
end = time.time()
print("Numba Calculation Time: ", end - start)
for rivid in range(results_array.shape[1]):
for forecast_day in range(results_array.shape[0]):
tmp_array = results_array[forecast_day, rivid, :]
tuple_to_append = (rivids_chunk[rivid], '{} Day Forecast'.format(str(forecast_day + 1).zfill(2)),
tmp_array[0], tmp_array[1], tmp_array[2])
list_of_tuples_with_metrics.append(tuple_to_append)
start_chunk += chunk_size
end_chunk += chunk_size
final_df = pd.DataFrame(list_of_tuples_with_metrics,
columns=['Rivid', 'Forecast Day', 'CRPS', 'CRPS BENCH', 'CRPSS'])
final_df.to_csv(r'/Users/wade/PycharmProjects/Forecast_Validation/South_America_Test_DF.csv', index=False)
@nb.njit(parallel=True)
def numba_calculate_metrics(forecast_array, initialization_array, number_of_start_dates, number_of_streams,
num_forecast_days):
"""
Parameters
----------
forecast_array: 4D ndarray
A 4 dimensional numPy array with the following dimensions: 1) Start Date Number (365 if there are a year's
forecasts), 2) Unique stream ID, 3) Forecast Days (e.g. 1-15 in a 15 day forecast), 4) Ensembles
initialization_array: 2D ndarray
        A 2 dimensional NumPy array with the following dimensions: 1) Start Dates, 2) Unique stream ID
    number_of_start_dates:
        The number of start dates to include in the analysis
number_of_streams:
The number of streams in the analysis
num_forecast_days:
The number of forecast days in the analysis
Returns
-------
ndarray
        An ndarray with the following dimensions:
1) Forecast Day: 1-15 in the case of a 15 day forecast
2) Rivid: The stream unique ID
3) Metrics: CRPS, CRPS_BENCH, CRPSS
"""
return_array = np.zeros((num_forecast_days, number_of_streams, 3), dtype=np.float32)
for stream in nb.prange(number_of_streams):
for forecast_day in range(num_forecast_days):
initialization_vals = initialization_array[(forecast_day + 1):, stream]
# np.savetxt("init_test.txt", initialization_vals)
forecasts = forecast_array[:(number_of_start_dates - (forecast_day + 1)), stream, forecast_day, :]
# np.savetxt("forecasts_test.txt", forecasts)
benchmark_forecasts = initialization_array[:(number_of_start_dates - (forecast_day + 1)), stream]
# np.savetxt("benchmark_forecasts_test.txt", benchmark_forecasts)
crps = ens_crps(initialization_vals, forecasts)
crps_bench = mae(initialization_vals, benchmark_forecasts)
if crps_bench == 0:
crpss = np.inf
print("Warning: Division by zero on: ", stream)
else:
crpss = 1 - crps / crps_bench
return_array[forecast_day, stream, 0] = crps
return_array[forecast_day, stream, 1] = crps_bench
return_array[forecast_day, stream, 2] = crpss
# print(crps, crps_bench, crpss)
if (stream % 1000) == 0:
print("Count: ", stream)
return return_array
@nb.njit()
def mae(sim, obs):
return np.mean(np.abs(sim - obs))
@nb.njit()
def ens_crps(obs, fcst_ens, adj=np.nan):
rows = obs.size
cols = fcst_ens.shape[1]
col_len_array = np.ones(rows) * cols
sad_ens_half = np.zeros(rows)
sad_obs = np.zeros(rows)
crps = np.zeros(rows)
crps = numba_crps(
fcst_ens, obs, rows, cols, col_len_array, sad_ens_half, sad_obs, crps, np.float64(adj)
)
# Calc mean crps as simple mean across crps[i]
crps_mean = np.mean(crps)
return crps_mean
@nb.njit()
def numba_crps(ens, obs, rows, cols, col_len_array, sad_ens_half, sad_obs, crps, adj):
for i in range(rows):
the_obs = obs[i]
the_ens = ens[i, :]
the_ens = np.sort(the_ens)
sum_xj = 0.
sum_jxj = 0.
j = 0
while j < cols:
sad_obs[i] += np.abs(the_ens[j] - the_obs)
sum_xj += the_ens[j]
sum_jxj += (j + 1) * the_ens[j]
j += 1
sad_ens_half[i] = 2.0 * sum_jxj - (col_len_array[i] + 1) * sum_xj
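    # For the sorted ensemble x_1..x_n, sad_ens_half equals sum_{i<j} (x_j - x_i),
    # so the default branch below computes the standard ensemble CRPS estimator
    # crps = mean|x - obs| - sum_{i<j} |x_i - x_j| / n^2.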
if np.isnan(adj):
for i in range(rows):
crps[i] = sad_obs[i] / col_len_array[i] - sad_ens_half[i] / \
(col_len_array[i] * col_len_array[i])
elif adj > 1:
for i in range(rows):
crps[i] = sad_obs[i] / col_len_array[i] - sad_ens_half[i] / \
(col_len_array[i] * (col_len_array[i] - 1)) * (1 - 1 / adj)
elif adj == 1:
for i in range(rows):
crps[i] = sad_obs[i] / col_len_array[i]
else:
for i in range(rows):
crps[i] = np.nan
return crps
if __name__ == "__main__":
starting_date = "2018-08-19"
ending_date = "2018-12-16"
dates_range = pd.date_range(starting_date, ending_date)
dates_strings = dates_range.strftime("%Y%m%d").tolist()
workspace = r'/Users/wade/Documents/South_America_Forecasts'
MEMORY_TO_ALLOCATE = 1.0 # GB
start = time.time()
compute_all(workspace, MEMORY_TO_ALLOCATE, dates_strings)
end = time.time()
print(end - start)
``` |
{
"source": "jorgetagle/dagger",
"score": 2
} |
#### File: dagger/cli/init_pipeline.py
```python
from dagger.pipeline.pipeline import Pipeline
from dagger.utils import Printer
import click
@click.command()
def init_pipeline() -> None:
"""
Printing pipeline template config
"""
Printer.print_success(Pipeline.sample())
if __name__ == "__main__":
    init_pipeline()
```
#### File: dagger/cli/init_task.py
```python
import sys
import click
from dagger.pipeline.task_factory import TaskFactory
from dagger.utils import Printer
task_factory = TaskFactory()
valid_tasks = task_factory.factory.keys()
@click.command()
@click.option("--type", "-t", help="Type of task")
def init_task(type: str) -> None:
"""
Printing task template config
"""
Printer.print_success(task_factory.factory[type].sample())
@click.command()
def list_tasks() -> None:
"""
Printing valid task types
"""
Printer.print_success("\n".join(valid_tasks))
if __name__ == "__main__":
task_factory.factory["batch"].sample()
```
#### File: dagger/cli/print_graph.py
```python
import click
from dagger.config_finder.config_finder import ConfigFinder
from dagger.config_finder.config_processor import ConfigProcessor
from dagger.graph.task_graph import TaskGraph
def _print_graph(root_dir: str):
cf = ConfigFinder(root_dir)
cp = ConfigProcessor(cf)
pipelines = cp.process_pipeline_configs()
g = TaskGraph()
for pipeline in pipelines:
g.add_pipeline(pipeline)
g.print_graph()
@click.command()
@click.option("--root", "-r", help="Root directory")
def print_graph(root: str) -> None:
"""
    Printing the pipeline task graph
"""
_print_graph(root)
if __name__ == "__main__":
_print_graph("./")
```
#### File: dagger/config_finder/config_finder.py
```python
import fnmatch
import logging
import os
from typing import List
PIPELINE_CONFIG_FILENAME = "pipeline.yaml"
_logger = logging.getLogger("configFinder")
class TaskConfig:
def __init__(self, config: str):
self._config = config
@property
def config(self):
return self._config
class PipelineConfig:
def __init__(self, directory: str, config: str, job_configs: List[TaskConfig]):
"""[summary]
Parameters
----------
config : str
[Pipeline config string]
job_configs : List[TaskConfig]
[List of job config strings belonging to this pipeline]
"""
self._directory = directory
self._config = config
self._job_configs = job_configs
@property
def directory(self):
return self._directory
@property
def config(self):
return self._config
@property
def job_configs(self):
return self._job_configs
class ConfigFinder:
def __init__(self, root: str):
self._root = root
def find_configs(self) -> List[PipelineConfig]:
_logger.info("Collecting config files from: %s", self._root)
pipeline_configs = []
for root, _, files in os.walk(self._root):
confs = fnmatch.filter(files, "*.yaml")
if len(confs) <= 1:
continue
job_configs = []
_logger.info("Searching pipeline config in directory: %s", root)
pipeline_config_file = ""
for conf in confs:
if conf == PIPELINE_CONFIG_FILENAME:
pipeline_config_file = conf
_logger.info("Config found in directory: %s", root)
else:
job_configs.append(TaskConfig(conf))
if pipeline_config_file == "":
_logger.info("Didn't find config in directory: %s", root)
continue
pipeline_configs.append(
PipelineConfig(root, pipeline_config_file, job_configs)
)
return pipeline_configs
```
#### File: dagger/config_finder/config_processor.py
```python
import logging
from os import environ
from os.path import join, relpath, splitext
from mergedeep import merge
import yaml
from envyaml import EnvYAML
from dagger.config_finder.config_finder import ConfigFinder
from dagger.pipeline.pipeline import Pipeline
from dagger.pipeline.task_factory import TaskFactory
import dagger.conf as conf
_logger = logging.getLogger("configFinder")
DAG_DIR = join(environ.get("AIRFLOW_HOME", "./"), "dags")
class ConfigProcessor:
def __init__(self, config_finder: ConfigFinder):
self._config_finder = config_finder
self._task_factory = TaskFactory()
def _load_yaml(self, yaml_path):
config_dict = EnvYAML(yaml_path).export()
config_dict = self.localize_params(config_dict)
return config_dict
def localize_params(self, config):
env_dependent_params = config.get("environments", {}).get(conf.ENV, {})
if env_dependent_params.get("deactivate"):
return None
merge(config, env_dependent_params)
return config
def process_pipeline_configs(self):
configs = self._config_finder.find_configs()
pipelines = []
for pipeline_config in configs:
pipeline_name = relpath(pipeline_config.directory, DAG_DIR).replace(
"/", "-"
)
config_path = join(pipeline_config.directory, pipeline_config.config)
_logger.info("Processing config: %s", config_path)
config_dict = self._load_yaml(config_path)
if config_dict:
pipeline = Pipeline(pipeline_config.directory, config_dict)
else:
_logger.info(f"{pipeline_name} pipeline is disabled in {conf.ENV} environment")
continue
for task_config in pipeline_config.job_configs:
task_name = splitext(task_config.config)[0]
task_config_path = join(pipeline_config.directory, task_config.config)
_logger.info("Processing task config: %s", task_config_path)
task_config = self._load_yaml(task_config_path)
if task_config:
task_type = task_config["type"]
pipeline.add_task(
self._task_factory.create_task(
task_type, task_name, pipeline_name, pipeline, task_config
)
)
else:
_logger.info(f"{task_name} job is disabled in {conf.ENV} environment")
pipelines.append(pipeline)
return pipelines
```
#### File: airflow/operator_creators/airflow_op_creator.py
```python
import importlib
from os import path
from dagger import conf
from dagger.dag_creator.airflow.operator_creator import OperatorCreator
class AirflowOpCreator(OperatorCreator):
ref_name = "airflow_operator"
def __init__(self, task, dag):
super().__init__(task, dag)
def _create_operator(self, **kwargs):
params = {**kwargs}
del params["description"]
airflow_operator_module = importlib.import_module(self._task.module)
operator_class = getattr(airflow_operator_module, self._task.class_name)
if self._task.python:
python_file = path.relpath(
path.join(self._task.pipeline.directory, self._task.python),
conf.AIRFLOW_HOME,
)
python_module = path.splitext(python_file)[0].replace("/", ".")
python_function = getattr(
importlib.import_module(python_module), self._task.function
)
params["python_callable"] = python_function
params["provide_context"] = True
params["op_kwargs"] = self._template_parameters
batch_op = operator_class(dag=self._dag, task_id=self._task.name, **params)
return batch_op
```
#### File: airflow/operator_creators/athena_transform_creator.py
```python
from os.path import join
from dagger.dag_creator.airflow.operator_creator import OperatorCreator
from dagger.dag_creator.airflow.operators.aws_athena_operator import AWSAthenaOperator
class AthenaTransformCreator(OperatorCreator):
ref_name = "athena_transform"
def __init__(self, task, dag):
super().__init__(task, dag)
athena_output = task._outputs[0]
self._output_database = athena_output.schema
self._output_table = athena_output.table
@staticmethod
def _read_sql(directory, file_path):
full_path = join(directory, file_path)
with open(full_path, "r") as f:
sql_string = f.read()
return sql_string
def _create_operator(self, **kwargs):
sql_string = self._read_sql(self._task.pipeline.directory, self._task.sql_file)
athena_op = AWSAthenaOperator(
dag=self._dag,
task_id=self._task.name,
query=sql_string,
aws_conn_id=self._task.aws_conn_id,
database=self._output_database,
s3_tmp_results_location=self._task.s3_tmp_results_location,
s3_output_location=self._task.s3_output_location,
output_table=self._output_table,
is_incremental=self._task.is_incremental,
partitioned_by=self._task.partitioned_by,
output_format=self._task.output_format,
workgroup=self._task.workgroup,
params=self._template_parameters,
**kwargs,
)
return athena_op
```
#### File: dag_creator/airflow/operator_factory.py
```python
from airflow.operators.dummy_operator import DummyOperator
from dagger.dag_creator.airflow.operator_creator import OperatorCreator
from dagger.dag_creator.airflow.operator_creators import (
airflow_op_creator,
athena_transform_creator,
batch_creator,
dummy_creator,
python_creator,
redshift_load_creator,
redshift_transform_creator,
redshift_unload_creator,
spark_creator,
sqoop_creator,
)
from dagger.dag_creator.airflow.utils.operator_factories import make_control_flow
class DataOperator(DummyOperator):
ui_color = "#e8f7e4"
def __init__(self, *args, **kwargs):
super(DataOperator, self).__init__(*args, **kwargs)
class OperatorFactory:
def __init__(self):
self.factory = dict()
for cls in OperatorCreator.__subclasses__():
self.factory[cls.ref_name] = cls
def create_operator(self, task, dag):
cls = self.factory.get(task.ref_name, dummy_creator.DummyCreator)
return cls(task, dag).create_operator()
@staticmethod
def create_control_flow_operator(is_dummy_operator_short_circuit, dag):
return make_control_flow(is_dummy_operator_short_circuit, dag)
@staticmethod
def create_dataset_operator(data_id, dag):
return DataOperator(dag=dag, task_id=data_id)
```
#### File: airflow/utils/operator_factories.py
```python
from functools import partial
from airflow.operators.python_operator import ShortCircuitOperator
def make_control_flow(is_dummy_operator_short_circuit, dag):
control_flow = ShortCircuitOperator(
task_id="dummy-control-flow",
dag=dag,
provide_context=True,
python_callable=partial(eval_control_flow, is_dummy_operator_short_circuit),
)
return control_flow
def eval_control_flow(is_dummy_operator_short_circuit, **kwargs):
if not is_dummy_operator_short_circuit:
return True
if kwargs["task_instance"].next_try_number > 2:
return True
return False
```
#### File: airflow/utils/utils.py
```python
from pathlib import Path
def get_sql_queries(path):
sql_queries = {}
for query_file in Path(path).glob("*.sql"):
with open(query_file, "r") as f:
sql_queries[query_file.stem] = f.read()
return sql_queries
```
#### File: dagger/graph/task_graph.py
```python
import logging
import sys
from abc import ABC
import dagger.pipeline.pipeline
from dagger.pipeline.io import IO
from dagger.pipeline.task import Task
from dagger.utilities.exceptions import IdAlreadyExistsException
from dagger.conf import config
_logger = logging.getLogger("graph")
class Node(ABC):
def __init__(self, node_id: str, name_to_show: str, obj=None):
self._node_id = node_id
self._name = name_to_show if name_to_show else node_id
self._parents = set()
self._children = set()
self._obj = obj
def __repr__(self):
return """
id: {node_id}
\tparents: {parents}
\tchildren: {children}
""".format(
node_id=self._name,
parents=", ".join(list(self._parents)),
children=", ".join(list(self._children)),
)
@property
def name(self):
return self._name
@property
def parents(self):
return self._parents
@property
def children(self):
return self._children
@property
def obj(self):
return self._obj
def add_parent(self, parent_id):
self._parents.add(parent_id)
def add_child(self, child_id):
self._children.add(child_id)
class Edge:
def __init__(self, follow_external_dependency=False):
self._follow_external_dependency = follow_external_dependency
@property
def follow_external_dependency(self):
return self._follow_external_dependency
class Graph(object):
def __init__(self):
self._nodes = {}
self._node2type = {}
self._edges = {}
def _node_exists(self, node_id):
return self._node2type.get(node_id, None) is not None
def add_node(
self,
node_type: str,
node_id: str,
name_to_show: str = None,
obj: object = None
):
if self._nodes.get(node_type, None) is None:
self._nodes[node_type] = {}
if self._nodes[node_type].get(node_id, None) is None and self._node2type.get(node_id, None):
_logger.exception(
"A different type of node with the same id: %s already exists",
node_id,
)
raise IdAlreadyExistsException(f"A different type of node with the same id: {node_id} already exists")
if self._nodes[node_type].get(node_id):
_logger.debug("Node with name: %s already exists", node_id)
return
self._node2type[node_id] = node_type
self._nodes[node_type][node_id] = Node(node_id, name_to_show, obj)
def get_node(self, node_id: str):
if not self._node_exists(node_id):
return None
return self._nodes[self._node2type[node_id]][node_id]
def get_nodes(self, node_type):
return self._nodes.get(node_type, None)
def add_edge(self, from_node_id, to_node_id, **attributes):
from_node = self.get_node(from_node_id)
to_node = self.get_node(to_node_id)
if from_node is None:
_logger.exception(
"Adding edge (%s, %s), %s does not exist in graph",
from_node_id,
to_node_id,
from_node_id,
)
if to_node is None:
_logger.exception(
"Adding edge (%s, %s), %s does not exist in graph",
from_node_id,
to_node_id,
to_node_id,
)
from_node.add_child(to_node_id)
to_node.add_parent(from_node_id)
self._edges[(from_node_id, to_node_id)] = Edge(**attributes)
def get_type(self, node_id):
if not self._node_exists(node_id):
return None
return self._node2type[node_id]
def get_edge(self, from_node_id, to_node_id):
return self._edges.get((from_node_id, to_node_id))
class TaskGraph:
NODE_TYPE_PIPELINE = "pipeline"
NODE_TYPE_TASK = "task"
NODE_TYPE_DATASET = "dataset"
def __init__(self):
self._graph = Graph()
def add_pipeline(self, pipeline: dagger.pipeline.pipeline.Pipeline):
self._graph.add_node(
node_type=self.NODE_TYPE_PIPELINE, node_id=pipeline.name, obj=pipeline
)
for task in pipeline.tasks:
self.add_task(task)
self._graph.add_edge(pipeline.name, task.uniq_name)
def add_task(self, task: Task):
self._graph.add_node(
node_type=self.NODE_TYPE_TASK,
node_id=task.uniq_name,
name_to_show=task.name,
obj=task,
)
for task_input in task.inputs:
self.add_dataset(task_input)
if task_input.has_dependency:
self._graph.add_edge(
task_input.alias(),
task.uniq_name,
follow_external_dependency=task_input.follow_external_dependency
)
for task_output in task.outputs:
self.add_dataset(task_output)
if task_output.has_dependency:
self._graph.add_edge(task.uniq_name, task_output.alias())
def add_dataset(self, io: IO):
self._graph.add_node(node_type=self.NODE_TYPE_DATASET, node_id=io.alias(), obj=io)
def print_graph(self, out_file=None):
fs = open(out_file, "w") if out_file else sys.stdout
for pipe_id, node in self._graph.get_nodes(self.NODE_TYPE_PIPELINE).items():
fs.write(f"Pipeline: {pipe_id}\n")
for node_id in list(node.children):
child_node = self._graph.get_node(node_id)
fs.write(f"\t task: {child_node.name}\n")
fs.write(f"\t inputs:\n")
for parent_id in list(child_node.parents):
if self._graph.get_type(parent_id) == self.NODE_TYPE_DATASET:
parent_node = self._graph.get_node(parent_id)
fs.write(f"\t\t {parent_node.name}\n")
fs.write(f"\t outputs:\n")
for output_id in list(child_node.children):
output_node = self._graph.get_node(output_id)
fs.write(f"\t\t {output_node.name}\n")
for output_task_id in list(output_node.children):
task_node = self._graph.get_node(output_task_id)
fs.write(f"\t\t\t dependency: {task_node.name}\n")
fs.write("\n")
fs.write("\n")
```
#### File: pipeline/ios/dummy_io.py
```python
from dagger.pipeline.io import IO
class DummyIO(IO):
ref_name = "dummy"
@classmethod
def init_attributes(cls, orig_cls):
cls.add_config_attributes([])
def __init__(self, io_config, config_location):
super().__init__(io_config, config_location)
def alias(self):
return "dummy://{}".format(self._name)
@property
def rendered_name(self):
return "{}".format(self._name)
@property
def airflow_name(self):
return "dummy-{}".format(self._name)
```
#### File: pipeline/ios/s3_io.py
```python
from os.path import join, normpath
from dagger.pipeline.io import IO
from dagger.utilities.config_validator import Attribute
class S3IO(IO):
ref_name = "s3"
@classmethod
def init_attributes(cls, orig_cls):
cls.add_config_attributes(
[
Attribute(
attribute_name="s3_protocol",
required=False,
comment="S3 protocol: s3a/s3/s3n",
),
Attribute(attribute_name="bucket"),
Attribute(attribute_name="path"),
]
)
def __init__(self, io_config, config_location):
super().__init__(io_config, config_location)
self._s3_protocol = self.parse_attribute("s3_protocol") or "s3"
self._bucket = normpath(self.parse_attribute("bucket"))
self._path = normpath(self.parse_attribute("path"))
def alias(self):
return "s3://{path}".format(path=join(self._bucket, self._path))
@property
def rendered_name(self):
return "{protocol}://{path}".format(
protocol=self._s3_protocol, path=join(self._bucket, self._path)
)
@property
def airflow_name(self):
return "s3-{}".format(join(self._bucket, self._path).replace("/", "-"))
@property
def bucket(self):
return self._bucket
@property
def path(self):
return self._path
```
#### File: pipeline/tasks/python_task.py
```python
from dagger.pipeline.task import Task
from dagger.utilities.config_validator import Attribute
class PythonTask(Task):
ref_name = "python"
@classmethod
def init_attributes(cls, orig_cls):
cls.add_config_attributes(
[
Attribute(
attribute_name="python",
parent_fields=["task_parameters"],
comment="Relative path to python file that implements the function",
),
Attribute(
attribute_name="function",
parent_fields=["task_parameters"],
comment="Name of the function",
),
]
)
def __init__(self, name, pipeline_name, pipeline, job_config):
super().__init__(name, pipeline_name, pipeline, job_config)
self._python = self.parse_attribute("python")
self._function = self.parse_attribute("function")
@property
def python(self):
return self._python
@property
def function(self):
return self._function
```
#### File: dagger/utilities/exceptions.py
```python
class DaggerMissingFieldException(Exception):
def __init__(self, message):
super().__init__(message)
class DaggerFieldFormatException(Exception):
def __init__(self, message):
super().__init__(message)
class InvalidConfigException(Exception):
def __init__(self, message):
super().__init__(message)
class IdAlreadyExistsException(Exception):
def __init__(self, message):
super().__init__(message)
```
#### File: jorgetagle/dagger/setup.py
```python
from pathlib import Path
from setuptools import find_packages, setup
with open("README.rst") as readme_file:
readme = readme_file.read()
with open("HISTORY.rst") as history_file:
history = history_file.read()
def strip_comments(line):
    return line.split("#", 1)[0].strip()
def _pip_requirement(req, *root):
if req.startswith("-r "):
_, path = req.split()
return reqs(*root, *path.split("/"))
return [req]
def _reqs(*f):
path = (Path.cwd() / "reqs").joinpath(*f)
with path.open() as fh:
        reqs = [strip_comments(line) for line in fh.readlines()]
return [_pip_requirement(r, *f[:-1]) for r in reqs if r]
def reqs(*f):
return [req for subreq in _reqs(*f) for req in subreq]
install_requires = reqs("base.txt")
test_requires = reqs("test.txt") + install_requires
setup(
author="<NAME>",
author_email="<EMAIL>",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
description="Config Driven ETL",
entry_points={"console_scripts": ["dagger=dagger.main:cli"]},
long_description=readme + "\n\n" + history,
include_package_data=True,
keywords="dagger",
name="dagger",
setup_requires=install_requires,
install_requires=install_requires,
test_suite="tests",
packages=find_packages(),
tests_require=test_requires,
url="https://gitlab.com/goflash1/data/dagger",
version="0.9.0",
zip_safe=False,
)
```
#### File: pipeline/ios/test_gdrive_io.py
```python
import unittest
from dagger.pipeline.io_factory import gdrive_io
import yaml
class GDriveIOTest(unittest.TestCase):
def setUp(self) -> None:
with open('tests/fixtures/pipeline/ios/gdrive_io.yaml', "r") as stream:
config = yaml.safe_load(stream)
self.db_io = gdrive_io.GDriveIO(config, "/")
def test_properties(self):
self.assertEqual(self.db_io.alias(), "gdrive://test_folder/test_file_name")
self.assertEqual(self.db_io.rendered_name, "test_folder/test_file_name")
self.assertEqual(self.db_io.airflow_name, "gdrive-test_folder-test_file_name")
``` |
{
"source": "jorgetavares/pygenome",
"score": 2
} |
#### File: pygenome/examples/es_adaptive_sphere.py
```python
import numpy as np
import pygenome as pg
# config ES, minimization fitness function required
sphere_model_size = 30
def fitness_fn(x): return pg.sphere_model(x[:sphere_model_size])
# ES 1
def es_mu_comma_lambda_one_step():
np.random.seed(42)
pop = pg.evolutionary_strategy_adaptive(
fitness_fn, sphere_model_size, -5.0, 5.0)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# ES 2
def es_mu_plus_lambda_one_step():
np.random.seed(42)
pop = pg.evolutionary_strategy_adaptive(
fitness_fn, sphere_model_size, -5.0, 5.0, replace_pop=pg.mu_plus_lambda_replacement)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# ES 3
def es_mu_comma_lambda_n_step():
np.random.seed(42)
pop = pg.evolutionary_strategy_adaptive(
fitness_fn, sphere_model_size, -5.0, 5.0, mt=pg.uncorrelated_n_steps_mutation_adaptive, pop_size=100, pool_size=500)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# ES 4
def es_mu_plus_lambda_n_step():
np.random.seed(42)
pop = pg.evolutionary_strategy_adaptive(fitness_fn, sphere_model_size, -5.0, 5.0, replace_pop=pg.mu_plus_lambda_replacement,
mt=pg.uncorrelated_n_steps_mutation_adaptive, pop_size=100, pool_size=500)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# entry point
if __name__ == "__main__":
print('ES 1: mu,lambda, one step mutation')
es_mu_comma_lambda_one_step()
print('ES 2: mu+lambda, one step mutation')
es_mu_plus_lambda_one_step()
print('ES 3: mu,lambda, n step mutation')
es_mu_comma_lambda_n_step()
print('ES 4: mu+lambda, n step mutation')
es_mu_plus_lambda_n_step()
```
#### File: pygenome/examples/ga_permutation.py
```python
import numpy as np
import pygenome as pg
# fitness function: counts how many elements of a permutation are out of place (0 means fully sorted)
def sorted_permutation(vector):
unsorted = vector.size
for i in range(vector.size):
if vector[i] == i:
unsorted -= 1
return unsorted
permutation_size = 10
# GA 1
def generational_no_elitism():
np.random.seed(42)
pop = pg.genetic_algorithm_permutation(
sorted_permutation, permutation_size, total_generations=25)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# GA 2
def generational_with_elitism():
np.random.seed(42)
pop = pg.genetic_algorithm_permutation(
sorted_permutation, permutation_size, total_generations=25, elitism=True)
best = pg.best_individual(pop)
print('fitness: %s\tgenotype: %s' % (best.fitness.value, best.genotype))
# entry point
if __name__ == "__main__":
print('GA 1: generational, no elitism')
generational_no_elitism()
print('GA 2: generational, with elitism')
generational_with_elitism()
```
#### File: pygenome/representations/individual.py
```python
import numpy as np
from copy import deepcopy
class Individual(object):
'''
Base class for all type of individuals
'''
def __init__(self, fitness=None, genotype=None, parameters=None):
# this should be an object since fitness can be more than a simple value
self.fitness = fitness
self.genotype = genotype # this should be an object that can be of any type
self.parameters = parameters # this allows self-adaptive parameters
self.run_eval = True # the individual requires evaluation
def clone(self):
return deepcopy(self)
def equal(self, ind):
if self.fitness is not None:
fit = self.fitness.equal(ind.fitness)
else:
fit = self.fitness == ind.fitness
gen = np.array_equal(self.genotype, ind.genotype)
par = np.array_equal(self.parameters, ind.parameters)
eva = self.run_eval == ind.run_eval
return fit and gen and eva and par
class TreeIndividual(Individual):
'''
Class for GP based individuals
'''
def __init__(self, fitness=None, tree=None, depth=None, nodes=None, parameters=None):
super().__init__(fitness=fitness, genotype=tree, parameters=parameters)
self.depth = depth
self.nodes = nodes
def equal(self, ind):
depth = self.depth == ind.depth
nodes = self.nodes == ind.nodes
return super().equal(ind) and depth and nodes
```
#### File: pygenome/representations/linear.py
```python
import numpy as np
def integer_chromossome(size, low=0, high=1):
'''
Integer Chromossome (random)
Args:
size (int): the fixed size of the chromossome
min_value (int): minimum value that can be in the chromossome
max_value (int): maximum value (inclusive) that can be in the chromossome
Returns:
fixed size chromossome initialized with random integers
'''
high_value = high + 1
chromossome = np.random.randint(low, high=high_value, size=size)
return chromossome
def permutation_chromossome(size):
'''
Permutation Chromossome (random)
Args:
size (int): the fixed size of the chromossome
Returns:
fixed size chromossome initialized with a random permutation from 0 to size -1
'''
return np.random.permutation(size)
def uniform_chromossome(size, low=0.0, high=1.0):
'''
Uniform Chromossome (random)
Args:
size (int): the fixed size of the chromossome
min_value (float): minimum value that can be in the chromossome
max_value (float): maximum value (exclusive) that can be in the chromossome
Returns:
fixed size chromossome initialized with random floats using a uniform distribution
'''
return np.random.uniform(low=low, high=high, size=size)
def normal_chromossome(size, mean=0.0, sigma=1.0):
'''
Normal Chromossome (random)
Args:
size (int): the fixed size of the chromossome
mean (float): mean of the normal distribution
sigma (float): sigma of the normal distribution
Returns:
fixed size chromossome initialized with random floats using a normal distribution
'''
return np.random.normal(loc=mean, scale=sigma, size=size)
```
#### File: pygenome/representations/population.py
```python
import numpy as np
from copy import deepcopy
import pygenome as pg
from pygenome.representations.tree import grow_tree
class Population(object):
'''
Base class for a population
'''
def __init__(self, individuals=None):
self.individuals = individuals
self.size = individuals.size if individuals is not None else 0
def clone(self):
return deepcopy(self)
def equal(self, pop):
same_size = self.size == pop.size
same_individuals = True
for i in range(self.size):
if not self.individuals[i].equal(pop.individuals[i]):
same_individuals = False
break
return same_size and same_individuals
def make_empty_population(size):
'''
Make Empty Population
Args:
size (int): number of individuals that the population will contain
Returns:
empty population of fixed size to store new individuals
'''
return Population(individuals=np.empty(size, dtype=object))
def make_generic_population(size, make_individual_fn, *args, **kargs):
'''
Make Generic Population
Args:
size (int): number of individuals in the population
make_individual_fn (function): function that returns a random individual
Return:
random population of fixed size without fitness evaluation
'''
pop = make_empty_population(size)
for i in range(size):
pop.individuals[i] = pg.Individual()
pop.individuals[i].genotype = make_individual_fn(*args, **kargs)
return pop
def make_adaptive_population(pop, make_individual_fn, *args, **kargs):
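    '''
    Make Adaptive Population
    Args:
        pop (Population): population whose individuals will receive adaptive parameters
        make_individual_fn (function): function that returns a random set of parameters
    Returns:
        the same population with self-adaptive parameters attached to each individual
    '''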
for i in range(pop.size):
pop.individuals[i].parameters = make_individual_fn(*args, **kargs)
return pop
def make_integer_population(size, ind_size, low=0, high=1):
'''
Make Integer Population
Args:
size (int): number of individuals in the Population
ind_size (int): the fixed size of the chromossome
low (int): minimum value that can be in the chromossome
high (int): maximum value (inclusive) that can be in the chromossome
Returns:
array of individuals randomly initialized to have integer chromossomes.
if low and high are None, it generates a permutation from 0 to ind_size - 1
'''
if low is None and high is None:
individual_type = pg.permutation_chromossome
pop = make_generic_population(size, individual_type, ind_size)
else:
individual_type = pg.integer_chromossome
pop = make_generic_population(
size, individual_type, ind_size, low=low, high=high)
return pop
def make_uniform_population(size, ind_size, low=0.0, high=1.0):
'''
Make Uniform Population
Args:
size (int): number of individuals in the Population
ind_size (int): the fixed size of the chromossome, defines the permutation range
low (float): minimum value that can be in the chromossome
high (float): maximum value (exclusive) that can be in the chromossome
Returns:
array of individuals randomly initialized with a uniform distribution
'''
return make_generic_population(size, pg.uniform_chromossome, ind_size, low=low, high=high)
def make_normal_population(size, ind_size, mean=0.0, sigma=1.0):
'''
Make Normal Population
Args:
size (int): number of individuals in the Population
ind_size (int): the fixed size of the chromossome, defines the permutation range
mean (float): mean of the normal distribution
sigma (float): sigma of the normal distribution
Returns:
array of individuals randomly initialized with a normal distribution
'''
return make_generic_population(size, pg.normal_chromossome, ind_size, mean=mean, sigma=sigma)
def make_tree_population(size, pset, init_max_depth, max_depth, initial_type=None, init_method=grow_tree):
'''
Make Tree Population
Args:
size (int): number of individuals in the Population
pset (PrimitiveSet): set of primitives to build a random tree
init_max_depth (int): initial max tree depth
max_depth (int): max tree depth that translates into max array size
        initial_type (type): when using types, this constrains the initial primitive to be of this type
init_method (function): function that generates random trees (grow_tree, full_tree)
Returns:
array of tree based individuals initialized according to given method, with or without types
'''
pop = make_empty_population(size)
for i in range(size):
pop.individuals[i] = pg.TreeIndividual()
pop.individuals[i].genotype = init_method(
pset, init_max_depth, max_depth, initial_type=initial_type)
depth, nodes = pg.count_tree_internals(
pset, pop.individuals[i].genotype)
pop.individuals[i].depth = depth
pop.individuals[i].nodes = nodes
return pop
```
#### File: tests/examples/test_ga_onemax.py
```python
from examples.ga_onemax import *
stdout1 = """0 0.03125 0.040781302009688776 0.005561455106895243
1 0.029411764705882353 0.038091861610110966 0.00450622600604616
2 0.027777777777777776 0.03547228263920674 0.00378636872370032
3 0.027777777777777776 0.03383622027764222 0.003792657676482543
4 0.027777777777777776 0.03153144879877114 0.0024635362886572016
5 0.023809523809523808 0.030139024953537853 0.00243139455636538
6 0.023809523809523808 0.02894388940095638 0.0026576983008068868
7 0.023809523809523808 0.02740366108153792 0.001826310539891214
8 0.023255813953488372 0.026444187681938076 0.0015349653277309185
9 0.022727272727272728 0.026012870101612462 0.001608538168134231
10 0.022222222222222223 0.025314390354864127 0.0013064223948593403
11 0.022222222222222223 0.02475279874881244 0.0014170379402423956
12 0.02127659574468085 0.024026041106628093 0.0013427418981510168
13 0.02127659574468085 0.0233757082989196 0.0012006885907910165
14 0.020833333333333332 0.02285467855630095 0.0010185863389449473
15 0.02 0.022430398717967374 0.0008704333997032909
16 0.02 0.021960350829972216 0.0008949697776471712
17 0.02 0.021653716984652648 0.0007101590492949621
18 0.02 0.021357860050448662 0.000618545520306597
19 0.02 0.02111156184156859 0.0006393216238278883
fitness: 0.02 genotype: [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
1 1 1 1 1 1 1 1 1 1 1 1 1]
"""
stdout2 = """0 0.03125 0.040781302009688776 0.005561455106895243
1 0.029411764705882353 0.03805953402390407 0.004543443870432798
2 0.027777777777777776 0.03540714856298931 0.004003729039375085
3 0.027777777777777776 0.03338954942095523 0.0032194506876552006
4 0.025 0.031759630111261566 0.0024138303577230054
5 0.024390243902439025 0.030465830885796095 0.0024506760085341995
6 0.024390243902439025 0.029748169948644855 0.002240931141267259
7 0.023255813953488372 0.02878293987059667 0.0021429324656018814
8 0.023255813953488372 0.027502249635484855 0.0019123397613806427
9 0.023255813953488372 0.02645550691596207 0.0018288473202157202
10 0.022222222222222223 0.025367100470720813 0.0015199281190212102
11 0.022222222222222223 0.02461907262781222 0.0013499923545325775
12 0.021739130434782608 0.024065322825332153 0.0011729776361822577
13 0.02127659574468085 0.023461332182942187 0.0010670316985843752
14 0.02127659574468085 0.02289507628617888 0.0009396105298584204
15 0.020833333333333332 0.022522016567904247 0.0008268338171416158
16 0.02040816326530612 0.022136339976635826 0.0007804385336199252
17 0.02 0.021817609951539876 0.0008327779489794365
18 0.02 0.02147309566758398 0.0007178798030896314
19 0.02 0.021207604712420763 0.0006741417655733425
fitness: 0.02 genotype: [1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
1 1 1 1 1 1 1 1 1 1 1 1 1]
"""
stdout3 = """0 0.03125 0.040781302009688776 0.005561455106895243
1 0.03125 0.04076591739430416 0.0055657214195924745
2 0.03125 0.040622042750429514 0.005507164564763866
3 0.03125 0.04062719572949554 0.005525504386535372
4 0.03125 0.040572560348773205 0.0054984646902999194
5 0.03125 0.04034090492142615 0.005390384072367683
6 0.03125 0.0402046536026411 0.005419778941160164
7 0.03125 0.04013739327676008 0.0054623937675048004
8 0.03125 0.04007443031379712 0.005338017400020037
9 0.03125 0.04008458848257598 0.005319762685780308
10 0.03125 0.04010683542362159 0.005291563125455103
11 0.03125 0.0400226603394465 0.005272742309413064
12 0.03125 0.04005228996907613 0.0052641997698029625
13 0.03125 0.03998787773075085 0.005261292143217217
14 0.03125 0.03984664065698004 0.005316203083084664
15 0.03125 0.03988885010781018 0.005299466931565621
16 0.03125 0.03976902252160328 0.005410194445432538
17 0.03125 0.03968429308554145 0.005435280260514713
18 0.03125 0.03951577456702293 0.005206081471009931
19 0.03125 0.03935236039498807 0.005161212358951413
20 0.03125 0.03943844448969286 0.005138235191023557
21 0.03125 0.03921225401350238 0.005069181577556729
22 0.03125 0.039104984632319956 0.0050662190117403486
23 0.03125 0.03912662965396498 0.005097826009881623
24 0.03125 0.039124976214811535 0.00504608772719245
25 0.03125 0.03907867991851524 0.005043793314564134
26 0.03125 0.03904662863646396 0.005037425492953105
27 0.03125 0.03907867991851524 0.005043793314564134
28 0.03125 0.03896815036186499 0.004962528399866003
29 0.03125 0.03900793816027348 0.004942315673450242
30 0.03125 0.03895114567739404 0.00488617726647651
31 0.03125 0.03895768754534895 0.00495978158921641
32 0.03125 0.0388843550359764 0.005016909124594712
33 0.03125 0.03876251595551663 0.0050627579498394104
34 0.029411764705882353 0.03871180601636856 0.00513031663950765
35 0.029411764705882353 0.03877376323220508 0.005091848287170193
36 0.029411764705882353 0.03876419872264057 0.005108817760699437
37 0.029411764705882353 0.0385806766181466 0.0051765377035178336
38 0.029411764705882353 0.03857604469093403 0.00516256817573713
39 0.029411764705882353 0.03840009933028102 0.004911792561082685
40 0.029411764705882353 0.038435941798657225 0.004981197894576194
41 0.029411764705882353 0.03853501357514836 0.004917322984771472
42 0.029411764705882353 0.038551071591206375 0.004878023381996317
43 0.029411764705882353 0.03850821444834923 0.004883928233132197
44 0.029411764705882353 0.03848615317628796 0.0049545125688823305
45 0.029411764705882353 0.0383247775149123 0.004630315762456711
46 0.029411764705882353 0.03835051678203087 0.004617491935241724
47 0.029411764705882353 0.03835225004238483 0.004622889754411538
48 0.029411764705882353 0.03829598890156486 0.00461175402246592
49 0.029411764705882353 0.038335183040759 0.004595201177906614
50 0.029411764705882353 0.03834498738176467 0.004631145416784666
51 0.029411764705882353 0.03824933877749539 0.0046539394757127055
52 0.029411764705882353 0.03820648163463824 0.004657352542172028
53 0.029411764705882353 0.03811561436985793 0.004638459235855089
54 0.029411764705882353 0.038042281860485375 0.004686336005487959
55 0.029411764705882353 0.03805401594480011 0.004728978747106032
56 0.029411764705882353 0.037953681496639566 0.004665623840500757
57 0.029411764705882353 0.03790496354792162 0.004563527561674136
58 0.029411764705882353 0.037946760831098214 0.0045125666497372
59 0.029411764705882353 0.03786342749776488 0.0045200413120227495
60 0.029411764705882353 0.03787299200732939 0.004502766305273224
61 0.029411764705882353 0.03772479343171146 0.004577197733181666
62 0.029411764705882353 0.03756110295552098 0.004590515018741406
63 0.029411764705882353 0.0375312087756268 0.004581945112910876
64 0.02857142857142857 0.037414501408919436 0.004649577397769239
65 0.02857142857142857 0.03745978020419824 0.004664641840042881
66 0.02857142857142857 0.03744054943496747 0.004699542302834623
67 0.029411764705882353 0.037476425323784535 0.00468220417022404
68 0.029411764705882353 0.03743274716286499 0.00467600604283492
69 0.029411764705882353 0.037407749460447934 0.004685628116342981
70 0.029411764705882353 0.037317251722891374 0.0047513339193878245
71 0.029411764705882353 0.0371447649504046 0.004644532507889975
72 0.029411764705882353 0.03723060921366822 0.004606305036740161
73 0.029411764705882353 0.03719333234519001 0.004568490742026526
74 0.029411764705882353 0.037072499011856684 0.004570371800346553
75 0.029411764705882353 0.03725888669824436 0.00463564051398577
76 0.029411764705882353 0.037377996564413055 0.004572851829122976
77 0.029411764705882353 0.037349533566310586 0.004613310651506204
78 0.029411764705882353 0.037411568305764684 0.004586057330254607
79 0.029411764705882353 0.03731581914751553 0.0045754960654208286
80 0.029411764705882353 0.03728618951788589 0.004567604980281492
81 0.029411764705882353 0.03715757738927377 0.004466345151557656
82 0.029411764705882353 0.03718413767445474 0.0044624741750795074
83 0.029411764705882353 0.037248896144892726 0.004402375850256563
84 0.029411764705882353 0.037142274455737305 0.0043518420263156665
85 0.029411764705882353 0.03697717037183455 0.004176873824141705
86 0.029411764705882353 0.03691124526986941 0.004183622694639944
87 0.029411764705882353 0.03684504480366894 0.004240558604486811
88 0.029411764705882353 0.03679626460782992 0.004306445175176725
89 0.029411764705882353 0.03680857987876588 0.004301569721001867
90 0.029411764705882353 0.03678527172037708 0.004314663123220094
91 0.029411764705882353 0.03669090939471609 0.004363215285960536
92 0.029411764705882353 0.0367041369079436 0.004362239421426741
93 0.029411764705882353 0.03664874409743641 0.004292859713942862
94 0.029411764705882353 0.036647250668523625 0.004343984692202518
95 0.029411764705882353 0.03642403638280934 0.004202931967059495
96 0.029411764705882353 0.036346617027970624 0.004207650344462092
97 0.029411764705882353 0.03613744536746657 0.004189380043502483
98 0.029411764705882353 0.036101821938141807 0.00420205885430731
99 0.029411764705882353 0.03602970655352642 0.004222773629600652
100 0.029411764705882353 0.03592388644770632 0.004060497274442531
101 0.029411764705882353 0.03595217999324124 0.004071462517889265
102 0.029411764705882353 0.03592819979184169 0.004078346489282658
103 0.029411764705882353 0.035853086669669736 0.004133926322621262
104 0.029411764705882353 0.03584505137280204 0.004094286794650818
105 0.029411764705882353 0.035880376696565505 0.004032672462065162
106 0.029411764705882353 0.03591315319426135 0.004063742414963224
107 0.029411764705882353 0.0357711077397159 0.00397503381350392
108 0.029411764705882353 0.035768356978344455 0.003962536919820571
109 0.029411764705882353 0.03567339676614286 0.003933717881361394
110 0.029411764705882353 0.035622114714860806 0.003930461569449365
111 0.029411764705882353 0.03558141467416077 0.003917447924549628
112 0.029411764705882353 0.03561374226036766 0.0038948442958828483
113 0.029411764705882353 0.03554190317990789 0.003848514458455151
114 0.029411764705882353 0.03561523568928045 0.0038381428516082675
115 0.029411764705882353 0.03556268571793181 0.0038805391979271028
116 0.029411764705882353 0.035620556088302176 0.003858883261254072
117 0.029411764705882353 0.03558076828989369 0.0038498871382687747
118 0.029411764705882353 0.03547102134884809 0.003860076863019065
119 0.029411764705882353 0.035436682268388316 0.0038172229775300655
120 0.029411764705882353 0.03537120607791212 0.003856482710039714
121 0.029411764705882353 0.03528586318006923 0.0038617281680746883
122 0.029411764705882353 0.035275782534907936 0.0038709235452645437
123 0.029411764705882353 0.035280937374621596 0.0038435826251394536
124 0.02857142857142857 0.03525930650004955 0.0038534921506053663
125 0.027777777777777776 0.03522458427782733 0.003904764199816617
126 0.027777777777777776 0.035022273816896175 0.003832353761110754
127 0.027777777777777776 0.034827144152127044 0.003559835915024719
128 0.027777777777777776 0.03472864448776989 0.0036016624391633514
129 0.027777777777777776 0.03472156387774811 0.0035043721635600886
130 0.027777777777777776 0.03465311149679572 0.0035207979205571158
131 0.027777777777777776 0.0346293019729862 0.0035215917339726003
132 0.027777777777777776 0.03459457975076398 0.003571503846812583
133 0.027777777777777776 0.03453844264074756 0.0035871430778440278
134 0.027777777777777776 0.03458399774018197 0.0035785494716660954
135 0.027777777777777776 0.034554977701081284 0.0036064459984184467
136 0.027777777777777776 0.034235458984503746 0.00323394946738343
137 0.027777777777777776 0.03422396473163018 0.003235092545128326
138 0.027777777777777776 0.03420949428828043 0.003273869605456437
139 0.027777777777777776 0.03419403214927824 0.003254116560130402
140 0.027777777777777776 0.03415162391791005 0.0032745242857899013
141 0.027777777777777776 0.0342087126624988 0.003217240720355445
142 0.027777777777777776 0.0342087126624988 0.003217240720355445
143 0.027777777777777776 0.034191320780187565 0.003184752436255108
144 0.027777777777777776 0.034036864125730905 0.0032643911269782584
145 0.027777777777777776 0.03390271400166143 0.003218074845557746
146 0.027777777777777776 0.033818057917005345 0.0032457395603201866
147 0.027777777777777776 0.03377779382286194 0.003268686844476118
148 0.027777777777777776 0.03374749079255891 0.003286663928290463
149 0.027777777777777776 0.033735324496674594 0.0032830776385959176
150 0.027777777777777776 0.033504161797481204 0.0031506200813113897
151 0.027777777777777776 0.03348332846414787 0.003158558769349625
152 0.027777777777777776 0.033526568748767464 0.0030999246087808903
153 0.027777777777777776 0.03346154349624222 0.00316030081196644
154 0.027777777777777776 0.03342091420225543 0.003178836757681714
155 0.027777777777777776 0.03336680164814288 0.003185387171859075
156 0.027777777777777776 0.03339245120415296 0.003201781315556209
157 0.02702702702702703 0.03342415938291997 0.003190911336279097
158 0.02702702702702703 0.033372281454582084 0.0032092216966512163
159 0.02702702702702703 0.033300852883153514 0.0032356813659525745
160 0.02702702702702703 0.03321699433973615 0.003255403901293605
161 0.027777777777777776 0.03326856314992565 0.0031931312795011164
162 0.027777777777777776 0.033162297655222016 0.0032588719730904867
163 0.027777777777777776 0.03317671334705706 0.0033321389733581017
164 0.027777777777777776 0.03309570037760596 0.0033620846391585702
165 0.027777777777777776 0.033067631672117896 0.003381712170645736
166 0.027777777777777776 0.03307771231727919 0.0033777791115661075
167 0.027777777777777776 0.03297205222169974 0.0033353899583604013
168 0.027777777777777776 0.03290062365027117 0.0033523395671306525
169 0.027777777777777776 0.03280394255359007 0.0033958866711734062
170 0.027777777777777776 0.032762145270413476 0.003400685321747771
171 0.027777777777777776 0.03276030523821292 0.0033929570661628797
172 0.027777777777777776 0.032687632812599315 0.0033452163657807653
173 0.027777777777777776 0.0325664847726526 0.0033138297659914503
174 0.02702702702702703 0.032502680144888024 0.0033544687941013283
175 0.02702702702702703 0.03239409983536958 0.0033327300870295787
176 0.02702702702702703 0.032357062798332545 0.0033013589668393216
177 0.027777777777777776 0.032392057155907546 0.003273357815197512
178 0.027777777777777776 0.0323320824084328 0.003328726877076974
179 0.027777777777777776 0.03221470580261806 0.0032446462457340615
180 0.027777777777777776 0.03221342281141571 0.00323851735811994
181 0.027777777777777776 0.03207602735143961 0.003260356365938651
182 0.02702702702702703 0.032014407289819545 0.0032550525488685205
183 0.02702702702702703 0.03194672860544257 0.0032723496623143747
184 0.02702702702702703 0.031909352951368625 0.0032874624668255393
185 0.02702702702702703 0.031909352951368625 0.0032874624668255393
186 0.02702702702702703 0.03200194554396121 0.0033001618731900856
187 0.02702702702702703 0.031925825645260676 0.0033184375182541037
188 0.02702702702702703 0.03188055592446913 0.0033422610548507936
189 0.02702702702702703 0.03196196348749434 0.003345614146100431
190 0.02702702702702703 0.0319227099482408 0.0033463680631940817
191 0.02702702702702703 0.031987178159767836 0.0033174240280473624
192 0.02702702702702703 0.03201641485444571 0.003311614185069456
193 0.02702702702702703 0.03196085929889016 0.0033355688856440712
194 0.02702702702702703 0.03186499720463684 0.0031114635289667385
195 0.02702702702702703 0.03179047774167924 0.0030888021593992667
196 0.02702702702702703 0.03181795026915177 0.003135444443992817
197 0.02702702702702703 0.03177739591609741 0.0030785962722787432
198 0.02702702702702703 0.03175840261496465 0.0030841676144967778
199 0.02702702702702703 0.03170284705940909 0.003105262167325866
fitness: 0.02702702702702703 genotype: [1 1 1 0 1 0 1 1 1 1 1 1 1 1 1 1 0 1 0 1 1 1 1 0 1 1 0 1 1 1 0 1 1 1 1 0 0
0 1 1 1 1 1 0 1 1 0 0 1 1]
"""
stdout4 = """0 0.03125 0.040781302009688776 0.005561455106895243
1 0.03125 0.04073358980809726 0.005611323894099167
2 0.03125 0.04053184479385226 0.005618896584066156
3 0.03125 0.04052943212031889 0.005707430033203208
4 0.03125 0.04031885132049323 0.005557155703438686
5 0.03125 0.04017681326595518 0.005550182390644313
6 0.03125 0.04007237029513291 0.005659834969094889
7 0.03125 0.03974061704954726 0.005675526261668921
8 0.03125 0.039619906293755856 0.005681785878437178
9 0.03125 0.03949760425033312 0.005732395162757329
10 0.03125 0.03957740110404302 0.005739659294499826
11 0.02857142857142857 0.03941847253261445 0.0058883086220128
12 0.02857142857142857 0.03946380220294412 0.005875490246256851
13 0.02857142857142857 0.03929806721329609 0.0059559672395683464
14 0.02857142857142857 0.0391270507297796 0.006101526459694825
15 0.02857142857142857 0.03888532756708457 0.006241677313710797
16 0.02857142857142857 0.038668706673044326 0.0063205121898296185
17 0.02857142857142857 0.03856764847198612 0.006393733325621937
18 0.02857142857142857 0.038311677123303065 0.006540601588919424
19 0.02857142857142857 0.038227021038646974 0.006610959070916546
20 0.02857142857142857 0.03792378604899895 0.006703191685388337
21 0.02857142857142857 0.03771059923581213 0.006825735384647775
22 0.02857142857142857 0.037652096883511114 0.006879646149613075
23 0.02857142857142857 0.03756052179193602 0.006806930000633567
24 0.02857142857142857 0.0372530984033113 0.006616244897318855
25 0.02857142857142857 0.037071472621685524 0.006700935418616673
26 0.027777777777777776 0.036894769026183265 0.00681638917178459
27 0.027777777777777776 0.03679332768336261 0.006737740324559595
28 0.027777777777777776 0.03656592636604193 0.006718795651361971
29 0.027777777777777776 0.03636299983863723 0.0067182066098195655
30 0.027777777777777776 0.03615974957582815 0.006784624195964068
31 0.027777777777777776 0.036001067461093515 0.006792736244762245
32 0.02631578947368421 0.03585705404487707 0.006886008196113005
33 0.02631578947368421 0.03571511934702132 0.006980966601862108
34 0.02631578947368421 0.03558208144338916 0.006975671272659737
35 0.02631578947368421 0.035523269641156305 0.007029830564216234
36 0.02631578947368421 0.035352630520894796 0.007109006244468943
37 0.02631578947368421 0.03525904015206289 0.00716713853183638
38 0.02631578947368421 0.035003150427752115 0.007261733438238009
39 0.02631578947368421 0.0350878065124082 0.007235558192867908
40 0.02631578947368421 0.03506031966234071 0.007253354166028904
41 0.02631578947368421 0.03497496379956549 0.007248977861899415
42 0.02631578947368421 0.03481186508054571 0.006966668179409498
43 0.02631578947368421 0.034732893345652924 0.006993106579244648
44 0.02631578947368421 0.03457633876475624 0.0070796600767005675
45 0.02564102564102564 0.03445953473545221 0.007096635586395174
46 0.02564102564102564 0.03424201890301702 0.007115120569419857
47 0.02564102564102564 0.03400009280129056 0.007168165801585374
48 0.02564102564102564 0.033670742784440544 0.00711148149312442
49 0.025 0.033436412300110054 0.007189450666750213
50 0.025 0.033447997635998795 0.007187477871410633
51 0.025 0.03322871184413236 0.007249243312145341
52 0.025 0.0330933317271733 0.00725059006156153
53 0.025 0.03295272671814723 0.007308861341128651
54 0.025 0.03287930504178438 0.007366395988270292
55 0.025 0.03278447745557748 0.007406073280120626
56 0.025 0.032792687082208165 0.007401201055329391
57 0.025 0.03268001985812789 0.0073843136398220365
58 0.025 0.03230008721972339 0.007245850271969602
59 0.024390243902439025 0.03214398965874778 0.007246430111445265
60 0.024390243902439025 0.032015866004383844 0.0072681425037237355
61 0.024390243902439025 0.031883485452205894 0.00733690135935937
62 0.024390243902439025 0.031816880996164566 0.007376902910834809
63 0.024390243902439025 0.03146209328162075 0.007177437385924868
64 0.024390243902439025 0.030962607235877355 0.0068235487821863006
65 0.024390243902439025 0.030309716409687423 0.006138468195036998
66 0.024390243902439025 0.029989853666525578 0.0056202135596107525
67 0.024390243902439025 0.029909236021992244 0.0056570168868638054
68 0.024390243902439025 0.029714197729741806 0.005507420236007412
69 0.024390243902439025 0.029641209851875877 0.005540760460082632
70 0.024390243902439025 0.029590994643841448 0.005565312649992668
71 0.024390243902439025 0.029540284704693373 0.00554356934473582
72 0.023809523809523808 0.0294674131318218 0.005574058368394143
73 0.023809523809523808 0.029395570937021352 0.005560822242024083
74 0.023809523809523808 0.029263551712821353 0.005507388169792296
75 0.023809523809523808 0.029057202506472146 0.0054044000172213495
76 0.023809523809523808 0.02892271087198051 0.005390781173143104
77 0.023809523809523808 0.028937998073724706 0.005386883261199939
78 0.023809523809523808 0.02888072253192904 0.005408732109380401
79 0.023809523809523808 0.02888038515001271 0.00540813338837751
80 0.023809523809523808 0.028692779962407523 0.005274723961496633
81 0.023809523809523808 0.02846658948621705 0.005154135225991897
82 0.023809523809523808 0.028378157592907106 0.005173921018975657
83 0.023809523809523808 0.028293672185841053 0.005178852024892103
84 0.023809523809523808 0.028122237340493163 0.004972342571789223
85 0.023809523809523808 0.02806820803646386 0.004994113363054957
86 0.023809523809523808 0.02806820803646386 0.004994113363054957
87 0.023809523809523808 0.02794674041499624 0.004918427003520906
88 0.023809523809523808 0.027886667587220818 0.004900505036397889
89 0.023809523809523808 0.0279223818729351 0.004892344487055345
90 0.023809523809523808 0.027903729472703757 0.004903536886326738
91 0.023809523809523808 0.027929532101447562 0.004909059012466715
92 0.023809523809523808 0.027844709312465177 0.004905805768405714
93 0.023809523809523808 0.027857217129851043 0.0048985714954845805
94 0.023809523809523808 0.027794784464477206 0.004915719554580067
95 0.023809523809523808 0.027662182081874816 0.004809379264527122
96 0.023809523809523808 0.027541684523877265 0.0048284192202885335
97 0.023809523809523808 0.027499010963501106 0.004810399870685516
98 0.023809523809523808 0.02735829801791011 0.004692016732470327
99 0.023809523809523808 0.027302208274320366 0.004678665596025819
100 0.023809523809523808 0.027162435766126806 0.004552932274250579
101 0.023809523809523808 0.027151233123345216 0.004561946442992086
102 0.023255813953488372 0.026918788873964664 0.004289242904213266
103 0.023255813953488372 0.026900743958018447 0.004299114587977271
104 0.023255813953488372 0.026793565879179342 0.004118194101992821
105 0.023255813953488372 0.026737543470215758 0.004120300907841031
106 0.023255813953488372 0.02677100203810976 0.004163631467732847
107 0.023255813953488372 0.026721347292490583 0.004163033936033469
108 0.023255813953488372 0.02671581019393023 0.004167269243327534
109 0.023255813953488372 0.02657680780408536 0.00417039084574992
110 0.023255813953488372 0.026560934788212344 0.004165409033319495
111 0.023255813953488372 0.02656301848582848 0.004170603683923384
112 0.023255813953488372 0.026610493881282317 0.00421484207235389
113 0.023255813953488372 0.026628808899597334 0.004206602264090147
114 0.023255813953488372 0.026584159261924437 0.0041969238651043605
115 0.023255813953488372 0.02658536980169067 0.00419997458125932
116 0.023255813953488372 0.026472222992579425 0.004215769526466076
117 0.023255813953488372 0.02645352799928223 0.004222125778576367
118 0.023255813953488372 0.026320351341118257 0.004090358032511461
119 0.023255813953488372 0.026302639378284347 0.00409903815479723
120 0.023255813953488372 0.026229175108855644 0.004103421645792556
121 0.023255813953488372 0.026271679625962364 0.004087112723481286
122 0.023255813953488372 0.02626493198763578 0.004087591304364971
123 0.023255813953488372 0.0262463266092743 0.004093708892816499
124 0.023255813953488372 0.02623594508257042 0.0041031299227546085
125 0.023255813953488372 0.026190075366976547 0.0041089287510517845
126 0.023255813953488372 0.026135623494899216 0.004129086542967953
127 0.023255813953488372 0.026205905789783716 0.004115175203514505
128 0.023255813953488372 0.026193308357462128 0.0041182653342320494
129 0.023255813953488372 0.025839045910176425 0.003477341432928507
130 0.023255813953488372 0.025808446154974466 0.0034864631254768698
131 0.023255813953488372 0.02573132158692534 0.0034937543640718286
132 0.023255813953488372 0.02578365360070166 0.0034924993721620577
133 0.023255813953488372 0.025728410740580746 0.0034834736685537
134 0.023255813953488372 0.025675772463750756 0.0035059864473457867
135 0.023255813953488372 0.02554115707913537 0.003262457391280839
136 0.023255813953488372 0.02549593744089248 0.0032624816665789286
137 0.023255813953488372 0.02547668198518003 0.003263268278986877
138 0.023255813953488372 0.025450604513980603 0.003262279428949987
139 0.023255813953488372 0.025469859969693056 0.003261646532635847
140 0.023255813953488372 0.025464052768763904 0.0032640796303695994
141 0.023255813953488372 0.025452708469274398 0.0032697583507197816
142 0.022727272727272728 0.02534261432019256 0.0032621280248064798
143 0.022727272727272728 0.025310961076684527 0.0032628251123270014
144 0.022727272727272728 0.025268708245875997 0.0032715108250115127
145 0.022727272727272728 0.025197252444862813 0.00319915986612111
146 0.022727272727272728 0.025157022302533968 0.003213736996065187
147 0.022727272727272728 0.02514619979171146 0.003220072617385478
148 0.022727272727272728 0.025114389734503608 0.003217563197988547
149 0.022727272727272728 0.025078504567039497 0.0032239688042125085
150 0.022727272727272728 0.02506685149143293 0.003227200421439853
151 0.022727272727272728 0.024913102015102807 0.0031401751832950456
152 0.022727272727272728 0.024918639113663165 0.0031377352236427146
153 0.022727272727272728 0.024871186566210617 0.003145316514677488
154 0.022727272727272728 0.0247821452525671 0.0031303927692320336
155 0.022727272727272728 0.024751797183663337 0.003131023043946763
156 0.022727272727272728 0.024734355323198228 0.0031344480686555398
157 0.022727272727272728 0.024764955078400184 0.0031348009257341713
158 0.022727272727272728 0.024731695654896857 0.0031473966182713883
159 0.022727272727272728 0.02466243158563279 0.0031346008819375867
160 0.022222222222222223 0.024615958814557814 0.003141382574566584
161 0.022222222222222223 0.024519277717876715 0.003105415519295516
162 0.022222222222222223 0.0245028829133367 0.0031112353636597737
163 0.022222222222222223 0.024462887678219518 0.0031180004659155784
164 0.022222222222222223 0.02443615695636684 0.0031291288953815715
165 0.022222222222222223 0.02443135813761454 0.0031331297745607885
166 0.022222222222222223 0.024439337238616896 0.003140412149256389
167 0.022222222222222223 0.02440873748341493 0.0031368851853292674
168 0.022222222222222223 0.024364213531914237 0.003147253659261851
169 0.022222222222222223 0.02435811597093863 0.00314660657645012
170 0.022222222222222223 0.024308657976358682 0.0031600312059756512
171 0.022222222222222223 0.02425625852605793 0.00313504764815793
172 0.022222222222222223 0.024229292896993604 0.0031436219889505058
173 0.022222222222222223 0.024208134468858434 0.003151748218016044
174 0.022222222222222223 0.024202849056596275 0.0031537833257092194
175 0.022222222222222223 0.024149029002776223 0.0031473309832006865
176 0.022222222222222223 0.024154566101336577 0.0031462415617485957
177 0.022222222222222223 0.0241283571681509 0.0031565107346521915
178 0.022222222222222223 0.02413892799267521 0.0031526836436063556
179 0.022222222222222223 0.024123541570312046 0.0031598933565761657
180 0.022222222222222223 0.024094404041174515 0.0031592000755098874
181 0.022222222222222223 0.0239782424250129 0.0031276457946775855
182 0.022222222222222223 0.023934105037852257 0.003133833954361933
183 0.022222222222222223 0.02383551408632013 0.003095805615909798
184 0.022222222222222223 0.02374288449369054 0.0030588974624381
185 0.022222222222222223 0.02375974911265385 0.0030586455350572798
186 0.022222222222222223 0.023684625405951197 0.003048970855806743
187 0.021739130434782608 0.023674509075814646 0.003052852873040262
188 0.021739130434782608 0.023637175430315575 0.0030593530822678734
189 0.021739130434782608 0.023637395017491684 0.003060049731537424
190 0.021739130434782608 0.023611640578693763 0.003067690138196303
191 0.021739130434782608 0.023567571121580824 0.0030682476009510212
192 0.021739130434782608 0.023530994171960397 0.003076225972762006
193 0.021739130434782608 0.023530994171960397 0.003076225972762006
194 0.021739130434782608 0.02352065825464774 0.003078868461455716
195 0.021739130434782608 0.02348593376225734 0.0030709428788565215
196 0.021739130434782608 0.02346522982850993 0.00307566702127991
197 0.021739130434782608 0.023272081629294998 0.002723000011150271
198 0.021739130434782608 0.02323116035531002 0.002732254763077608
199 0.021739130434782608 0.02323621086036053 0.0027308515244538496
fitness: 0.021739130434782608 genotype: [1 1 0 1 1 0 1 1 1 1 1 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 1
1 1 1 1 1 1 1 1 1 1 1 1 1]
"""
def test_generational_no_elitism(capfd):
generational_no_elitism()
out, err = capfd.readouterr()
assert out == stdout1
def test_generational_with_elitism(capfd):
generational_with_elitism()
out, err = capfd.readouterr()
assert out == stdout2
def test_steady_state_no_elitism(capfd):
steady_state_no_elitism()
out, err = capfd.readouterr()
assert out == stdout3
def test_steady_state_with_elitism(capfd):
steady_state_with_elitism()
out, err = capfd.readouterr()
assert out == stdout4
```
#### File: tests/examples/test_gp_symbolic_regression_ephemeral.py
```python
from examples.gp_symbolic_regression_ephemeral import *
stdout = """0 15.5 342.7222745822774 1366.9995897577076
1 15.5 174.4685625489089 844.2272957512475
2 15.5 124.54681372800574 98.97669147086395
3 15.5 123.06199596422003 172.773005271157
4 15.5 112.93167401603057 110.55449386625767
5 15.5 110.75156490776085 107.15816000429412
6 15.5 107.41878568168849 94.70862971364022
7 15.5 104.9594946174731 131.05722797231672
8 15.5 102.85428009866611 97.95860723478505
9 15.5 110.98935815106181 352.05102375014013
10 14.166666666666664 110.84648781792852 215.44631580465216
11 14.166666666666664 120.37369794446951 523.1855039602556
12 0.0 104.8690146278071 147.11730947200098
13 0.0 106.83970314838385 211.75884103989384
14 0.0 113.14368511729568 366.1095312746761
15 0.0 100.85097968877295 132.29635179185024
16 0.0 103.43094508171221 221.21510515586644
17 0.0 207.2485138038732 2357.3506013490055
18 0.0 104.79909815703387 162.5199523010139
19 0.0 103.18569110057368 305.4034617990897
fitness: 0.0 genotype: [4 6 4 7 6 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
"""
def test_gp_with_elitism_ephemeral(capfd):
gp_with_elitism_ephemeral()
out, err = capfd.readouterr()
assert out == stdout
```
#### File: tests/examples/test_gp_symbolic_regression.py
```python
from examples.gp_symbolic_regression import *
stdout = """0 46.5 156.4659622692552 183.61738555063005
1 46.5 132.9971535108812 30.718266130054925
2 32.5 128.5541883214168 54.31393951939565
3 32.5 120.6432016022899 54.4296163866599
4 32.5 112.05901424354084 34.25275505830754
5 32.5 108.69988522388344 48.21225456199498
6 0.0 106.13761318452147 52.95476292541651
7 0.0 103.21213720474007 66.94877798064552
8 0.0 101.00397496253747 61.29930195354359
9 0.0 94.84191895511894 51.188043754699315
10 0.0 97.43871518619832 84.23737266236267
11 0.0 103.65756445349886 104.62251784545246
12 0.0 102.80692416037579 218.1486786374937
13 0.0 95.94325216470521 104.76108149874408
14 0.0 102.49213635456861 156.2766645529315
15 0.0 111.87826823753666 411.059172261407
16 0.0 91.66089151082672 72.17293906498097
17 0.0 100.02009204358022 265.2424580943246
18 0.0 91.05697118808851 103.3961365429734
19 0.0 96.15183694459546 190.67018240424457
fitness: 0.0 genotype: [ 3 4 11 7 11 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
"""
def test_gp_with_elitism(capfd):
gp_with_elitism()
out, err = capfd.readouterr()
assert out == stdout
```
#### File: pygenome/operators/test_crossover.py
```python
import numpy as np
import pygenome as pg
def test_apply_crossover1():
np.random.seed(42)
size = 1000
ind_size = 100
rate = 1.0
operator = pg.uniform_crossover
pop = pg.make_integer_population(size, ind_size)
original_pop = pop.clone()
pop = pg.apply_crossover(pop, rate, operator)
for i in range(pop.size):
assert pop.individuals[i].run_eval is True
assert np.array_equal(pop.individuals[i].genotype, original_pop.individuals[i].genotype) is not True
def test_apply_crossover2():
np.random.seed(42)
size = 1000
ind_size = 100
rate = 0.0
operator = pg.uniform_crossover
pop = pg.make_integer_population(size, ind_size)
original_pop = pop.clone()
pop = pg.apply_crossover(pop, rate, operator)
for i in range(pop.size):
assert pop.individuals[i].run_eval is True
assert np.array_equal(pop.individuals[i].genotype, original_pop.individuals[i].genotype)
def test_one_point_crossover():
np.random.seed(42)
i1 = pg.Individual(genotype=np.array([0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1]))
i2 = pg.Individual(genotype=np.array([1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1]))
o1, o2 = pg.one_point_crossover(i1, i2)
assert np.array_equal(o1.genotype, np.array([0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1]))
assert np.array_equal(o2.genotype, np.array([1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1]))
def test_uniform_crossover():
np.random.seed(42)
i1 = pg.Individual(genotype=np.array([0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1]))
i2 = pg.Individual(genotype=np.array([1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1]))
o1, o2 = pg.uniform_crossover(i1, i2)
assert np.array_equal(o1.genotype, np.array([0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1]))
assert np.array_equal(o2.genotype, np.array([1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1]))
def test_partially_match_crossover():
np.random.seed(42)
i1 = pg.Individual(genotype=np.array([4, 3, 5, 7, 8, 1, 2, 0, 9, 6]))
i2 = pg.Individual(genotype=np.array([8, 9, 1, 3, 6, 5, 7, 4, 2, 0]))
o1, o2 = pg.partially_match_crossover(i1, i2)
assert np.array_equal(o1.genotype, np.array([4, 7, 8, 3, 6, 5, 1, 2, 0, 9]))
assert np.array_equal(o2.genotype, np.array([9, 3, 6, 7, 8, 1, 5, 4, 2, 0]))
def test_apply_global_crossover():
np.random.seed(42)
size = 1000
ind_size = 100
operator = pg.intermediary_crossover
pop = pg.make_uniform_population(size, ind_size)
original_pop = pop.clone()
pop = pg.apply_global_crossover(pop, operator)
for i in range(pop.size):
assert pop.individuals[i].run_eval is True
def test_intermediary_crossover():
np.random.seed(42)
i1 = pg.Individual(genotype=np.array([ 0.61185289, 0.13949386, 0.29214465, 0.36636184, 0.45606998, 0.78517596, 0.19967378, 0.51423444, 0.59241457, 0.04645041]))
i2 = pg.Individual(genotype=np.array([ 0.60754485, 0.17052412, 0.06505159, 0.94888554, 0.96563203, 0.80839735, 0.30461377, 0.09767211, 0.68423303, 0.44015249]))
o1 = pg.intermediary_crossover(i1, i2)
    assert np.allclose(o1.genotype, np.array([0.60969887, 0.15500899, 0.17859812, 0.65762369, 0.710851,
                                              0.79678666, 0.25214378, 0.30595327, 0.6383238, 0.24330145]))
def test_discrete_crossover():
np.random.seed(42)
i1 = pg.Individual(genotype=np.array([ 0.61185289, 0.13949386, 0.29214465, 0.36636184, 0.45606998,
0.78517596, 0.19967378, 0.51423444, 0.59241457, 0.04645041]))
i2 = pg.Individual(genotype=np.array([ 0.60754485, 0.17052412, 0.06505159, 0.94888554, 0.96563203,
0.80839735, 0.30461377, 0.09767211, 0.68423303, 0.44015249]))
o1 = pg.discrete_crossover(i1, i2)
    assert np.allclose(o1.genotype, np.array([0.61185289, 0.17052412, 0.06505159, 0.94888554, 0.45606998,
                                              0.78517596, 0.19967378, 0.09767211, 0.68423303, 0.44015249]))
```
#### File: pygenome/representations/test_primitive_set.py
```python
import numpy as np
import pygenome as pg
import operator as op
# add non-typed functions
def test_primitive_set_add_functions():
pset = pg.PrimitiveSet()
pset.addFunction(op.add, 2)
pset.addFunction(op.sub, 2)
pset.addFunction(op.mul, 2)
assert pset.typed is False
assert pset.num_primitives == 3
assert list(pset.functions.keys()) == [1, 2, 3]
assert pset.arity_cache == {2: [1, 2, 3]}
assert pset.functions_types == {}
# add typed functions
def test_primitive_set_add_typed_functions():
pset = pg.PrimitiveSet(typed=True)
pset.addFunction(op.add, 2, [int, int, int])
pset.addFunction(op.sub, 2, [int, int, int])
pset.addFunction(op.mul, 2, [float, float, float])
assert pset.typed is True
assert pset.num_primitives == 3
assert list(pset.functions.keys()) == [1, 2, 3]
assert pset.arity_cache == {2: [1, 2, 3]}
assert pset.functions_types == {int: [1, 2], float: [3]}
# duplicate functions
def test_duplicate_entries():
pset = pg.PrimitiveSet()
pset.addFunction(op.add, 2)
pset.addFunction(op.add, 2)
pset.addTerminal(2)
pset.addTerminal(2)
pset.addVariable("x")
pset.addVariable("x")
assert pset.num_primitives == 3
assert list(pset.functions.keys()) == [1]
assert list(pset.terminals.keys()) == [2]
assert list(pset.variables.keys()) == [3]
assert pset.arity_cache == {2: [1]}
assert pset.primitives == set([op.add, 2, "x"])
def float_constants():
return np.random.uniform()
def test_make_ephemeral_constants():
pset = pg.PrimitiveSet()
pset.addFunction(op.add, 2)
pset.addFunction(op.sub, 2)
pset.addTerminal(float_constants, types=None, ephemeral=True)
assert pset.num_primitives == 3
assert list(pset.functions.keys()) == [1, 2]
assert list(pset.terminals.keys()) == [3]
assert list(pset.variables.keys()) == []
assert pset.arity_cache == {2: [1, 2]}
assert pset.primitives == set([op.add, op.sub, float_constants])
assert pset.ephemeral_cache == set([3])
def test_make_ephemeral_constants_typed():
pset = pg.PrimitiveSet(typed=True)
pset.addFunction(op.add, 2, [float, float, float])
pset.addFunction(op.sub, 2, [float, float, float])
pset.addTerminal(float_constants, types=[float], ephemeral=True)
assert pset.num_primitives == 3
assert list(pset.functions.keys()) == [1, 2]
assert list(pset.terminals.keys()) == [3]
assert list(pset.variables.keys()) == []
assert pset.arity_cache == {2: [1, 2]}
assert pset.primitives == set([op.add, op.sub, float_constants])
assert pset.ephemeral_cache == set([3])
assert pset.functions_types == {float: [1, 2]}
assert pset.terminals_types == {float: [3]}
``` |
{
"source": "jorgeteixe/algorithms",
"score": 4
} |
#### File: algebra/binary_gcd/binary_gcd.py
```python
def odd(n):
return n % 2 == 1
def even(n):
return n % 2 == 0
def binary_gcd(a, b):
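    """Stein's binary GCD: computes gcd(a, b) using parity checks, halving and subtraction."""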
if a < 0 or b < 0:
        raise ArithmeticError('Numbers must be non-negative.')
if a == b:
return a
if a == 0:
return b
if b == 0:
return a
    # use integer floor division so the recursion stays in the integers;
    # with "/" the values become floats and the parity tests break down
    if even(a):
        if odd(b):
            return binary_gcd(a // 2, b)
        else:
            # both even: 2 is a common factor
            return 2 * binary_gcd(a // 2, b // 2)
    else:
        if even(b):
            return binary_gcd(a, b // 2)
        # both odd: the difference of two odd numbers is even
        if a > b:
            return binary_gcd((a - b) // 2, b)
        else:
            return binary_gcd((b - a) // 2, a)
def main():
a = 12
b = 20
gcd = binary_gcd(a, b)
    print(f'gcd({a}, {b}) = {gcd}')
if __name__ == '__main__':
main()
```
#### File: algebra/divisors/divisors.py
```python
from algebra.integer_division.integer_division import integer_division
def divisors(a):
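    """Return every positive and negative divisor of a, found by trial division from 1 to a."""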
divs = []
i = 1
while i <= a:
_, r = integer_division(+a, i)
if r == 0:
divs.append(i)
divs.append(-i)
i += 1
return divs
def main():
a = 12
divs = divisors(a)
print(f'a = {a}\ndivs = {divs}')
if __name__ == '__main__':
main()
``` |
{
"source": "JORGETORK/TheGame",
"score": 3
} |
#### File: TheGame/caminos_de_la_historia/camino_numero_1.py
```python
import os
import datetime
import subprocess
import sys
import time
from time import sleep
from io import open
from colorama import Cursor, init, Fore, Back, Style
from acciones_de_hostales import hostal_1
def camino():
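    # load the player's saved names and current stats from the hidden save files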
archivo = open(".nombres_del_juego.txt","r")
texto = archivo.readlines()
archivo.close()
archivo_1 = open(".stats.txt","r")
texto_1 = archivo_1.readlines()
archivo_1.close()
Ahora = datetime.datetime.now()
print(Ahora)
print("como ves ya cambiaron tus stats porque al lejir este camino has caminado mucho")
print("Dinero: $" + texto_1[0].rstrip("\n") + " Energia: " + texto_1[1].rstrip("\n") + "%" +" comida: " + texto_1[2].rstrip("\n") + "%")
print("muy bien " + texto[1].rstrip("\n") + " has elejido ir por la izquierda")
camino_1 = input("Hace ya 3 dias que tu historia comenzó te aventuraste sin rumbo has caminado mucho y descansado muy poco pero que suerte te encontraste un hostal\n ¿quieres entrar?\n 1)si\n 2)no\n Elije una opcion: ")
os.system ("clear")
if camino_1 == "1" :
Archivo = open("Tu Historia De TheGame.txt","w")
Archivo.write("Todo comenzó como un juego cuando eras pequeño pero hoy sé ha tornado realidad, tienes que dejar tu pueblo " + texto[2].rstrip("\n") + " para ir en busca de tu hermano " + texto[5].rstrip("\n") + " que fue secuestrado por " + texto[3].rstrip("\n") + ".\n")
Archivo.write("Te han revelado que " + texto[3].rstrip("\n") +" y tu padre " + texto[4].rstrip("\n") + " tenian una fuerte rivalidad porque alguna ves fueron aprendices del guerrero mas grande de toda la historia, pero tu padre demostro ser mucho mas poderoso y tener mas corazon.\n Sin embargo tu padre fallecio hace ya 10 años por su gran corazon al proteger a este mundo de las mas grandes amenasas,\n amenasas que ahora estan a punto de ser liveradas y por eso necesitas rescatar a tu hermano para juntar su fragemnto del mapa con el tuyo para averiguar como tu padre salvo al mundo.\n")
Archivo.write("Por el momento solo has llegado hasta el hostal de JA Silva")
Archivo.close()
init()
print(Ahora)
print("ENTRANDO AL HOSTAL... ")
for arch in ["[ ] 0%", "[===== ] 25%", "[========== ] 50%", "[=============== ] 75%", "[====================] 100%"]:
sleep(2)
print(Cursor.UP(1)+Cursor.FORWARD(21)+str(arch))
os.system("clear")
print("Bienvenido al hostal de JA Silva")
print("(se guardo una copia de la historia en un archivo .txt en la carpeta del juego por si te interesa.)")
print("Bien " + texto[1].rstrip("\n") + " ya estas dentro del hostal ¿que quieres hacer?\n")
hostal_1.acciones_del_hostal_1()
elif camino_1 == "2" :
print("que mal quien sabe cuando encontraras otro buen lugar para descansar")
else:
os.system ("clear")
print("Elije una opcion valida")
camino()
```
#### File: TheGame/partes_de_la_historia/historia_del_juego_parte_1.py
```python
import os
import datetime
import subprocess
import sys
import time
from time import sleep
from io import open
from colorama import Cursor, init, Fore, Back, Style
from caminos_de_la_historia import camino_numero_1
def inicio_del_juego():
archivo = open(".nombres_del_juego.txt","r")
texto = archivo.readlines()
archivo.close()
archivo_1 = open(".stats.txt","r")
texto_1 = archivo_1.readlines()
archivo_1.close()
Ahora = datetime.datetime.now()
print(Ahora)
print("Inicias con los siguientes stats, pero el tomar caminos largos te cansara y te dara hambre al igual que todo tiene un costo")
print("Dinero: $" + texto_1[0].rstrip("\n") + " Energia: " + texto_1[1].rstrip("\n") + "%" +" comida: " + texto_1[2].rstrip("\n") + "%")
input("Enter para seguir ")
os.system("clear")
print(Ahora)
print("Dinero: $" + texto_1[0].rstrip("\n") + " Energia: " + texto_1[1].rstrip("\n") + "%" +" comida: " + texto_1[2].rstrip("\n") + "%")
print("Todo comenzó como un juego cuando eras pequeño pero hoy sé ha tornado realidad, tienes que dejar tu pueblo " + texto[2].rstrip("\n") + " para ir en busca de tu hermano " + texto[5].rstrip("\n") + " que fue secuestrado por " + texto[3].rstrip("\n") + ".")
print("Te han revelado que " + texto[3].rstrip("\n") +" y tu padre " + texto[4].rstrip("\n") + " tenian una fuerte rivalidad porque alguna ves fueron aprendices del guerrero mas grande de toda la historia, pero tu padre demostro ser mucho mas poderoso y tener mas corazon.\n Sin embargo tu padre fallecio hace ya 10 años por su gran corazon al proteger a este mundo de las mas grandes amenasas,\n amenasas que ahora estan a punto de ser liveradas y por eso necesitas rescatar a tu hermano para juntar su fragemnto del mapa con el tuyo para averiguar como tu padre salvo al mundo.\n")
input("Enter para seguir ")
os.system("clear")
inicio_del_juego_1()
def inicio_del_juego_1():
archivo = open(".nombres_del_juego.txt","r")
texto = archivo.readlines()
archivo.close()
archivo_1 = open(".stats.txt","r")
texto_1 = archivo_1.readlines()
archivo_1.close()
Ahora = datetime.datetime.now()
print(Ahora)
print("Dinero: $" + texto_1[0].rstrip("\n") + " Energia: " + texto_1[1].rstrip("\n") + "%" +" comida: " + texto_1[2].rstrip("\n") + "%")
primer_movimiento = input("Bien ya que tienes todo listo y has salido del pueblo te encontraste dos caminos uno por la izquierda y otro por la derecha\n 1)izquierda\n 2)derecha\n ¿por cual camino quieres?: ")
os.system ("clear")
if primer_movimiento == "1" :
suma = int(texto_1[0].rstrip("\n"))-0
suma_2 = int(texto_1[1].rstrip("\n"))-50
suma_3 = int(texto_1[2].rstrip("\n"))-50
archivo_2 = open(".stats.txt","w")
archivo_2.write(str(suma) + "\n")
archivo_2.write(str(suma_2) + "\n")
archivo_2.write(str(suma_3) + "\n")
archivo_2.close()
os.system("clear")
camino_numero_1.camino()
elif primer_movimiento == "2" :
suma = int(texto_1[0].rstrip("\n"))-0
suma_2 = int(texto_1[1].rstrip("\n"))-50
suma_3 = int(texto_1[2].rstrip("\n"))-50
archivo_2 = open(".stats.txt","w")
archivo_2.write(str(suma) + "\n")
archivo_2.write(str(suma_2) + "\n")
archivo_2.write(str(suma_3) + "\n")
archivo_2.close()
os.system("clear")
camino_numero_1.camino()
else:
os.system("clear")
print("Elije una opcion valida")
inicio_del_juego_1()
``` |
{
"source": "JorgeTranin/Cursos_Coursera",
"score": 4
} |
#### File: Curso de Python USP Part1/Exercicios/coeficienteBinominal.py
```python
def fatorial(numero):
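    """Return numero! computed iteratively (fatorial(0) == 1)."""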
if numero == 0:
numero = 1
fat = 1
while numero != 1:
fat *= numero
numero -= 1
return fat
def numero_binomial(n,k):
    """Return the binomial coefficient C(n, k) = n! / (k! * (n - k)!)."""
    return fatorial(n) // (fatorial(k) * fatorial(n - k))
```
#### File: Curso de Python USP Part1/Exercicios/ElementosRepitidos_lista.py
```python
def remove_repetidos(list):
"""[Remove elementos repitidos de uma lista e devolve outra lista com esses elementos em ordem crescente]
Arguments:
list {[lista]} -- [uma lista]
Returns:
[lista] -- [elementos em ordem]
"""
lista2 = []
for elemento in list:
if elemento not in lista2:
lista2.append(elemento)
lista2.sort()
return lista2
lista = [2, 4, 2, 2, 3, 3, 1]
print(remove_repetidos(lista))
print(remove_repetidos([1, 2, 3, 3, 3, 4]))
```
#### File: Curso de Python USP Part1/Exercicios/maior_primo.py
```python
def eprimo(n):
    # count the divisors of n in [1, n); a prime has exactly one (just 1)
    if n < 2:
        return False
    cont = 0
    for i in range(1, n):
        if n % i == 0:
            cont += 1
        if cont > 1:
            break
    return cont == 1
def maior_primo(n):
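    """Return the largest prime less than or equal to n (assumes n >= 2)."""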
primo = n
j = 0
while j <= n:
if eprimo(j):
primo = j
j += 1
return primo
print(maior_primo(100))
print(maior_primo(7))
```
#### File: Curso de Python USP Part1/Exercicios/ProgramaCompleto_similaridade_COH-PIAH.py
```python
import re
def le_assinatura():
"""[A funcao le os valores dos tracos linguisticos do modelo e devolve uma assinatura a ser comparada com os textos fornecidos]
Returns:
[list] -- [description]
"""
print("Bem-vindo ao detector automático de COH-PIAH.")
print("Informe a assinatura típica de um aluno infectado:")
wal = float(input("Entre o tamanho médio de palavra:"))
ttr = float(input("Entre a relação Type-Token:"))
hlr = float(input("Entre a Razão Hapax Legomana:"))
sal = float(input("Entre o tamanho médio de sentença:"))
sac = float(input("Entre a complexidade média da sentença:"))
pal = float(input("Entre o tamanho medio de frase:"))
return [wal, ttr, hlr, sal, sac, pal]
def le_textos():
"""[A funcao le todos os textos a serem comparados e devolve uma lista contendo cada texto como um elemento]
Returns:
[Lista] -- [Cada texto com 1 elemento]
"""
i = 1
textos = []
texto = input("Digite o texto " + str(i) + " (aperte enter para sair):")
while texto:
textos.append(texto)
i += 1
texto = input("Digite o texto " + str(i) +
" (aperte enter para sair):")
return textos
def separa_sentencas(texto):
"""[A funcao recebe um texto e devolve uma lista das sentencas dentro do texto]
Arguments:
texto {[type]} -- [description]
Returns:
[type] -- [description]
"""
sentencas = re.split(r'[.!?]+', texto)
if sentencas[-1] == '':
del sentencas[-1]
return sentencas
def separa_frases(sentenca):
"""[A funcao recebe uma sentenca e devolve uma lista das frases dentro da sentenca]
Arguments:
sentenca {[str]} -- [recebe uma frase]
Returns:
[lista] -- [lista das frases contidas na sentença]
"""
return re.split(r'[,:;]+', sentenca)
def separa_palavras(frase):
"""[A funcao recebe uma frase e devolve uma lista das palavras dentro da frase]
Arguments:
frase {[str]} -- [Uma frase]
Returns:
[lista] -- [Retorna uma lista de palavras dentro da frase recebida]
"""
return frase.split()
def n_palavras_unicas(lista_palavras):
"""[Essa funcao recebe uma lista de palavras e devolve o numero de palavras que aparecem uma unica vez]
Arguments:
lista_palavras {[list]} -- [Recebe uma lista de palavras]
Returns:
[int] -- [Devolve o numero de palavras que aparecem uma unica vez]
"""
freq = dict()
unicas = 0
for palavra in lista_palavras:
p = palavra.lower()
if p in freq:
if freq[p] == 1:
unicas -= 1
freq[p] += 1
else:
freq[p] = 1
unicas += 1
return unicas
def n_palavras_diferentes(lista_palavras):
"""[Essa funcao recebe uma lista de palavras e devolve o numero de palavras diferentes utilizadas]
Arguments:
lista_palavras {[list]} -- [lista de palavras]
Returns:
[int] -- [Retorna o tamanho de palavras diferentes]
"""
freq = dict()
for palavra in lista_palavras:
p = palavra.lower()
if p in freq:
freq[p] += 1
else:
freq[p] = 1
return len(freq)
def compara_assinatura(as_a, as_b):
"""[IMPLEMENTAR. Essa funcao recebe duas assinaturas de texto e deve devolver o grau de similaridade nas assinaturas.]
Arguments:
as_a {[list]} -- [description]
as_b {[list]} -- [description]
Returns:
[float] -- [Grau de similaridade dos textos]
"""
soma_das_similaridade = 0
for i in range(0, 6):
soma_das_similaridade += (abs(as_a[i] - as_b[i]))
return soma_das_similaridade / 6
def calcula_assinatura(texto):
"""[IMPLEMENTAR. Essa funcao recebe um texto e deve devolver a assinatura do texto.]
Arguments:
texto {[list]} -- [lista recebida de um texto]
Returns:
[list] -- [devolve a assinatura que o usuario é ou não infectado pelo COH-PIAH]
"""
''''''
sentenças = separa_sentencas(texto)
frases = list()
palavras = list()
meias_palavras = 0
meias_sentenças = 0
comp_sentenças = 0
rel_type_token = 0
hapax = 0
soma_caractere_das_sentenças = 0
soma_das_palavras = 0
soma_os_caracteres_das_frases = 0
tamanho_meio_frase = 0
for sentença in sentenças:
soma_caractere_das_sentenças += len(sentença)
l_frases = separa_frases(sentença)
for fra in l_frases:
frases.append(fra)
for frase in frases:
soma_os_caracteres_das_frases += len(frase)
lista_palavra = separa_palavras(frase)
for palavra in lista_palavra:
palavras.append(palavra)
for palavra in palavras:
soma_das_palavras += len(palavra)
meias_palavras = soma_das_palavras / len(palavras)
rel_type_token = n_palavras_diferentes(palavras) / len(palavras)
hapax = n_palavras_unicas(palavras) / len(palavras)
meias_sentenças = soma_caractere_das_sentenças / len(sentenças)
comp_sentenças = len(frases) / len(sentenças)
tamanho_meio_frase = soma_os_caracteres_das_frases / len(frases)
return [meias_palavras, rel_type_token, hapax,meias_sentenças,comp_sentenças, tamanho_meio_frase]
def avalia_textos(textos, ass_cp):
"""[IMPLEMENTAR. Essa funcao recebe uma lista de textos e uma assinatura ass_cp e deve devolver o numero (1 a n) do texto com maior probabilidade de ter sido infectado por COH-PIAH.]
Arguments:
textos {[list]} -- [description]
ass_cp {[list]} -- [description]
Returns:
[int] -- [description]
"""
''''''
inf = []
for texto in textos:
ass_texto = calcula_assinatura(texto)
inf.append(compara_assinatura(ass_texto, ass_cp))
    menor = inf[0]
    c = 1
    for i in range(1, len(inf)):
        if inf[i] < menor:
            menor = inf[i]
            c = i + 1
    return c
# Programa Principal main
assinatura = le_assinatura()
textos = le_textos()
avaliar = avalia_textos(textos, assinatura)
print(f'O autor do texto {avaliar} está infectado com COH-PIAH')
```
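For reference, `compara_assinatura` above computes the mean absolute difference between the six linguistic traits, $S_{ab} = \frac{1}{6}\sum_{i=1}^{6}\lvert a_i - b_i\rvert$, so a lower score means two signatures are closer; this is why `avalia_textos` returns the text whose score against the reference signature is minimal.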
#### File: Curso de Python USP Part1/Exercicios/Soma_elementos_lista.py
```python
def soma_elementos(lista):
"""[Soma elementos de uma lista]
Arguments:
lista {[lista]} -- [quantos numeros quiser]
Returns:
[int] -- [elementos somados da lista]
"""
soma_lista = 0
for elemento in lista:
soma_lista += elemento
return soma_lista
print(soma_elementos([1, 2, 3, 4, 5, 6, 7, 8]))
```
#### File: Curso de python USP part2/Exercicios/Soma_de_Matrizes.py
```python
def dimensoes(matriz):
    li = len(matriz)
    c = len(matriz[0]) if li else 0
    return li, c
def soma_matrizes(m1, m2):
    l, c = dimensoes(m1)
    if dimensoes(m1) == dimensoes(m2):
        # Copy m1 row by row; aliasing it (m3 = m1) would mutate the input matrix.
        m3 = [linha[:] for linha in m1]
        for i in range(0, l):
            for col in range(0, c):
                m3[i][col] = m1[i][col] + m2[i][col]
        return m3
    else:
        return False
``` |
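A short usage check (illustrative) confirming that the copy-based version leaves its inputs untouched:

```python
a = [[1, 2], [3, 4]]
b = [[10, 20], [30, 40]]
print(soma_matrizes(a, b))  # [[11, 22], [33, 44]]
print(a)                    # [[1, 2], [3, 4]] -- left unchanged
```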
{
"source": "JorgeTranin/Python_Curso_Em_Video",
"score": 4
} |
#### File: Python_Curso_Em_Video/Exercicios Curso Em Video Mundo 3/Exe_096.py
```python
def area(l, c):
    print(f'A área do terreno {l :.1f} X {c :.1f} é {l*c :.2f} M²')
#! Programa principal
print(' Controle de terrenos')
print('-'*30)
l = float(input('Digite a largura (m): '))
c = float(input('Digite o comprimento (m): '))
area(l, c)
``` |
{
"source": "jorgeucano/angular-bazel-example",
"score": 2
} |
#### File: tools/history-server/index.bzl
```python
load("@build_bazel_rules_nodejs//:defs.bzl", "nodejs_binary")
def history_server(port = 5432, args = [], **kwargs):
if not args:
args = [native.package_name()]
args.extend(["-p", str(port)])
nodejs_binary(
node_modules = "@history-server_runtime_deps//:node_modules",
entry_point = "history-server/modules/cli.js",
args = args,
**kwargs)
``` |
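As a hedged sketch, the macro above would be invoked from a `BUILD` file roughly like this (target name and port are made up; the load label assumes the file location shown in the header, and `name` flows through `**kwargs` to `nodejs_binary`):

```python
load("//tools/history-server:index.bzl", "history_server")

history_server(
    name = "server",
    port = 8080,
)
```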
{
"source": "jorgeuliana1/road-generator",
"score": 3
} |
#### File: src/extra_tools/classes_distribution.py
```python
import sys
def main():
annotation_path = sys.argv[1]
with open(annotation_path, "r") as annotation_file:
csv_lines = annotation_file.read().strip().split("\n")
categories_occurrences = {}
for csv_line in csv_lines:
csv_info = csv_line.split(",")
_, _, _, _, _, category = csv_info
if category not in categories_occurrences.keys():
categories_occurrences[category] = 1
else:
categories_occurrences[category] += 1
print(categories_occurrences)
if __name__ == "__main__":
main()
```
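Run as `python classes_distribution.py annotations.csv` (file name illustrative); it prints a plain dict of per-category counts such as `{'car': 120, 'truck': 34}`.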
#### File: src/extra_tools/filter_classes.py
```python
import json, sys, os, argparse
def import_annotations(annotations_path):
# Opening file
with open(annotations_path, "r") as f:
lines = f.read().strip().split("\n")
annotations = []
for line in lines:
info = line.split(",")
annotations.append({
"path" : info[0],
"x0" : info[1],
"y0" : info[2],
"x1" : info[3],
"y1" : info[4],
"category" : info[5]
})
return annotations
def import_classes(classes_path, classes_to_keep):
# Opening file
with open(classes_path, "r") as f:
classes_json = json.load(f)
classes_list = list(classes_json.keys())
# Returns only classes that will be kept
return [ c for c in classes_list if c in classes_to_keep ]
def annotation_to_line(annotation):
return "{},{},{},{},{},{}".format(annotation["path"],
annotation["x0"], annotation["y0"],
annotation["x1"], annotation["y1"],
annotation["category"])
def substitute_in_annotation(annotations, class_to_substitute, new_class):
for annotation in annotations:
if annotation["category"] == class_to_substitute:
annotation["category"] = new_class
def keep_in_annotation(annotations, allowed_classes):
new_annotations = [ a for a in annotations if a["category"] in allowed_classes]
return new_annotations
def arguments():
argparser = argparse.ArgumentParser()
argparser.add_argument('--output')
argparser.add_argument('--annotations')
argparser.add_argument('--keep', nargs='+')
argparser.add_argument('--swap', nargs='+')
argparser.add_argument('--classes')
values = vars(argparser.parse_args())
return {
"output_folder" : values['output'],
"classes_path" : values['classes'],
"annotations_path": values['annotations'],
"keep" : values['keep'],
#"substitute_from" : values['swap'][0],
#"substitute_to" : values['swap'][1]
}
def dump(annotations, classes, output_dir):
# Creating the output folder:
os.makedirs(output_dir, exist_ok=True)
annotations_path = os.path.join(output_dir, "annotations.csv")
classes_path = os.path.join(output_dir, "classes.json")
# Dumping annotations.csv:
with open(annotations_path, "w") as f:
for annotation in annotations:
f.write(annotation_to_line(annotation))
if annotation != annotations[-1]: f.write("\n")
# Dumping classes.json:
classes_dict = {}
for i in range(len(classes)):
classes_dict[classes[i]] = i
with open(classes_path, "w") as classes_json:
classes_json.write(json.dumps(classes_dict, indent=2))
args = arguments()
annotations = import_annotations(args["annotations_path"])
classes = import_classes(args["classes_path"], args["keep"])
#substitute_in_annotation(annotations, args["substitute_from"], args["substitute_to"])
annotations = keep_in_annotation(annotations, args["keep"])
dump(annotations, classes, args["output_folder"])
```
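A plausible invocation of the filtering script, with made-up paths and class names: `python filter_classes.py --annotations data/annotations.csv --classes data/classes.json --keep car truck --output data/filtered`. Note that the `--swap` handling is commented out in the script, so only `--keep` filtering is currently applied.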
#### File: road-generator/src/roadimage.py
```python
from road import Road
from mark_tracker import MarkTracker
from roadgraphics import *
from crosswalk import CrossWalk
import random
import math
import drawings as drw
class RoadImage:
def __init__(self, dimensions, path, background_images, asphalt_textures, templates_collection, seed=0, set_seed=True):
self.w, self.h = dimensions
self.path = path # Where the image will be saved
self.road = Road(self.w, self.h)
# Defining the dictionaries containing random images
self.templates_collection = templates_collection
self.backgrounds = background_images
self.grounds = asphalt_textures
if set_seed:
random.seed(seed)
def setSeed(self, seed):
random.seed(seed)
def defineLanes(self, min_lanes, max_lanes, variation):
number_of_lanes = random.randint(min_lanes, max_lanes)
self.number_of_lanes = number_of_lanes
default_lane_proportion = (self.w / float(number_of_lanes))/float(self.w)
# Variation is given in percentage
# Creating the lanes:
lane_sizes = []
for i in range(number_of_lanes - 1):
# "addition" is the variation in the specific lane
addition = random.randint(-variation, variation) / 100.00 * default_lane_proportion
lane_width = addition + default_lane_proportion
lane_width = round(lane_width, 2) # Rounding the value to simplificate the sizes
# Adding the lane sizes in a list to create the last lane without any size issue:
lane_sizes.append(lane_width)
self.road.newLane(math.ceil(lane_width*self.w))
# Creating the last lane
lanes_size = 0
for i in lane_sizes:
lanes_size += i
lane_width = self.w - math.floor(lanes_size * self.w)
self.road.newLane(lane_width)
def getRoad(self):
return self.road
    def randomMark(self): # Returns a random template name
        templates_names = tuple(self.templates_collection.labels)
        random_key = templates_names[random.randint(0, len(templates_names) - 1)]
        return random_key
def randomBackground(self): # Returns a random background name
backgrounds = self.backgrounds
return backgrounds[random.randint(0, len(backgrounds) - 1)]
def randomGround(self):
grounds = self.grounds
return grounds[random.randint(0, len(grounds) - 1)]
def getRotation(self, minx, maxx, miny, maxy, minz, maxz):
def randomRotation(mind, maxd):
multiplier = random.randint(0, 1000) / 1000
difference = maxd - mind
return multiplier * difference + mind
# Getting Y and Z rotation signals (positive or negative):
ysig = random.sample((-1, 1), 1)[0]
zsig = random.sample((-1, 1), 1)[0]
# Getting rotations, in deegrees
x = -randomRotation(minx, maxx)
y = ysig * randomRotation(miny, maxy)
z = zsig * randomRotation(minz, maxz)
# Converting to radians:
x = x/180.00 * math.pi
y = y/180.00 * math.pi
z = z/180.00 * math.pi
# Returning rotations:
return (x, y, z)
def getShift(self, minx, maxx, miny, maxy):
# Getting shifts, in pixels
x = random.randint(0, 100) / 100 * (maxx - minx) + minx
y = random.randint(0, 100) / 100 * (maxy - miny) + miny
return (x, y)
def getRandomLane(self):
lanes = len(self.road.lanes)
return random.randint(0, lanes-1)
def getRandomSeparator(self, minwidth, maxwidth, mindotsize, maxdotsize, mindotdist, maxdotdist, minxdist, maxxdist):
# Defining colors:
colors = [
(255, 255, 255), # WHITE
(255, 255, 0), # YELLOW
(128, 128, 0) # DARK YELLOW
]
# Getting random color:
color = colors[random.randint(0,len(colors)-1)]
# Getting random dot_size:
dot_size = random.randint(mindotsize, maxdotsize)
# Getting random dot_dist:
dot_dist = random.randint(mindotdist, maxdotdist)
# Getting random x_dist:
x_dist = random.randint(minxdist, maxxdist)
# Getting random width:
width = random.randint(minwidth, maxwidth)
# Getting random true or false:
is_true = bool(random.getrandbits(1))
return (width, color, is_true, dot_size, dot_dist, x_dist)
def getLanesNumber(self):
return self.number_of_lanes
def insert_templates_at_lanes(self, delta_x, delta_y, min_h, max_h, min_w, max_w):
"""
min_h, max_h, min_w and max_w are proportions, they must be between 0 and 1
delta_x and delta_y are proportions, they must be between 0 and 1
"""
labels = self.templates_collection.labels
m = len(labels) # m : number of loaded templates.
road = self.getRoad()
# L is a vector which each index represents a lane in the road:
L = [math.ceil(m * random.randint(0, 100) / 100) for i in range(self.number_of_lanes)]
# Creating one empty lane:
        if len(L) > 1:
            # Pick a random lane to leave empty (-1 means no template at that lane).
            L[random.randint(0, len(L) - 1)] = -1
        # Defining the exact position and vectors of the to-be-inserted templates:
        templates = []
        for l in range(len(L)):
            Ln = L[l] - 1
            if Ln < 0: continue # Skipping the "supposed-to-be-empty" lanes (covers both 0 and -1 entries)
lane = road.lanes[l]
# Defining the template's dimensions:
min_size = (min_h + min_w) / 2 * lane.w
max_size = (max_h + max_w) / 2 * lane.w
base_siz = random.randint(0, 100) / 100 * (max_size - min_size) + min_size
base_dim = (int(base_siz), int(base_siz))
# Getting the template vector:
template = self.templates_collection.get(labels[Ln], base_dim)
# Inserting the template at the lane:
dx, dy = lane.getAbsoluteCoordinates(int(delta_x * lane.w), int(delta_y * lane.h))
template.displacement = dx, dy
templates.append(template)
return templates
def draw_templates(self, img, templates):
for template in templates:
img = template.draw(img, (255, 255, 255, 255))
return img
    def getTransform(self, maxblur, maxconstrast, maxbrightness):
        constrast = random.randint(0, maxconstrast)
        brightness = random.randint(0, maxbrightness)
        blurvalues = [1, 1, 3, 3, 5, 5, 7, 7, 9, 9]
        blur = blurvalues[random.randint(0, maxblur)]
        return blur, constrast/100, brightness/100
def getAgingMatrix(self, max_age):
h, w = self.h, self.w
aging_matrix = np.abs(np.random.randn(h, w))
aging_matrix = np.clip(aging_matrix, 0, 0.01 * max_age)
return aging_matrix
``` |
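A hedged construction sketch for `RoadImage`; the collaborator objects (`backgrounds`, `grounds`, `templates`) are assumed to be built elsewhere in the repo:

```python
img = RoadImage(
    dimensions=(640, 480),
    path="out/road_0.png",
    background_images=backgrounds,
    asphalt_textures=grounds,
    templates_collection=templates,
    seed=42,
)
img.defineLanes(min_lanes=2, max_lanes=4, variation=10)
```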
{
"source": "jorgev259/Kurisu",
"score": 3
} |
#### File: Kurisu/addons/imgconvert.py
```python
from io import BytesIO
import discord
import requests
from discord.ext import commands
from PIL import Image
class ImageConvert:
"""
Convert images automatically.
"""
def __init__(self, bot):
self.bot = bot
print('Addon "{}" loaded'.format(self.__class__.__name__))
async def on_message(self, message):
# BMP conversion
for f in message.attachments:
if f["filename"].lower().endswith('.bmp') and f["size"] <= 600000: # 600kb
img_request = requests.get(f["url"])
img_obj = Image.open(BytesIO(img_request.content))
img_out = BytesIO()
img_obj.save(img_out, 'png')
img_out.seek(0)
out_message = "{} from {}".format(self.bot.escape_name(f["filename"]), message.author.mention)
new_filename = f["filename"][:-3] + "png"
await self.bot.send_file(message.channel, img_out, filename=new_filename, content=out_message)
def setup(bot):
bot.add_cog(ImageConvert(bot))
``` |
{
"source": "jorgevasquez397/MyCSSI2019Lab",
"score": 4
} |
#### File: M3-Python/L3-Python_Object/Pet.py
```python
class Pet(object):
def __init__(self, name, age):
self.name = name
self.age = age
my_pet1 = Pet("Fido", 3)
print("My pet %s is %s years old") % (my_pet1.name, my_pet1.age)
my_pet2 = Pet("Chungus", 69)
print("My pet %s is %s years old") % (my_pet2.name, my_pet2.age)
my_pet3 = Pet("Lil dug", 420)
print("My pet %s is %s years old") % (my_pet3.name, my_pet3.age)
class Student(object):
def __init__(self, name, age, gender, major):
self.name = name
self.age = age
self.gender = gender
self.major = major
my_student = Student("<NAME>", 17, "Male", "Computer Science")
print("Hi my name is %s and I am %s years old. My gender is %s and I am majoring in %s") % (my_student.name, my_student.age, my_student.gender, my_student.major)
class Pet(object):
def __init__(self, name, age, animal):
self.name = name
self.age = age
self.animal = animal
self.is_hungry = False
self.mood = "happy"
def eat(self):
print("> %s is eating..." % self.name)
if self.is_hungry:
self.is_hungry = False
else:
print("> %s may have eaten too much." % self.name)
self.mood = "lethargic"
    def move(self):
        print("> %s is moving..." % self.name)
        if self.mood == "happy":
            self.mood = "happy"
        else:
            print("> %s doesn't look too happy today." % self.name)
            self.mood = "lethargic"
my_pet = Pet("Spooky", 3, "dog")
my_pet.is_hungry = True
print("Is my pet hungry? %s" % my_pet.is_hungry)
my_pet.eat()
print("How about now? %s" % my_pet.is_hungry)
print("My pet is feeling %s" % my_pet.mood)
``` |
{
"source": "jorgevilaca82/iguana",
"score": 2
} |
#### File: base/models/mixins.py
```python
import uuid
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
class HasSiglaMixin(models.Model):
"""Possiu atributo sigla"""
class Meta:
abstract = True
sigla = models.CharField(_("Sigla"), max_length=15)
def __str__(self) -> str:
return self.sigla
class HasNomeMixin(models.Model):
"""Possiu atributo nome"""
class Meta:
abstract = True
nome = models.CharField(_("Nome"), max_length=150)
def __str__(self) -> str:
return self.nome
class HasTituloMixin(models.Model):
"""
Models que possuem um atributo Título
"""
class Meta:
abstract = True
titulo = models.CharField(_("Título"), max_length=150)
def __str__(self):
return self.titulo
class HasDescricaoMixin(models.Model):
"""Possui atributo descrição"""
class Meta:
abstract = True
descricao = models.CharField(_("Descrição"), max_length=255)
def __str__(self) -> str:
return self.descricao
class HasObservacaoMixin(models.Model):
"""Possiu atributo observação"""
class Meta:
abstract = True
observacao = models.TextField(_("Observação"), null=True, blank=True)
def __str__(self) -> str:
return self.observacao
class Timestamps(models.Model):
class Meta:
abstract = True
created_at = models.DateTimeField(_("Created At"), auto_now_add=True)
updated_at = models.DateTimeField(_("Updated At"), auto_now=True)
class UUIDMixin(models.Model):
"""
Models que precisam de UUID como chave primária
"""
class Meta:
abstract = True
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
class HasDateTimeRangeMixin(models.Model):
"""Possui data de inicio e fim"""
class Meta:
abstract = True
started_at = models.DateTimeField(_("Started At"))
ended_at = models.DateTimeField(_("Ended At"), blank=True, null=True)
def is_datetime_in_range(self, datetime):
if self.ended_at is None:
return True
return self.started_at < datetime < self.ended_at
def is_now_in_range(self):
return self.is_datetime_in_range(timezone.localtime())
class HasIsActiveFlagMixin(models.Model):
"""Possui atributo flag de ativo"""
class Meta:
abstract = True
is_active = models.BooleanField(_("Active"), default=True)
def activate(self):
self.is_active = True
def inactivate(self):
self.is_active = False
class GeoLocationMixin(models.Model):
"""Possui geolocalização (lat, long)"""
class Meta:
abstract = True
latitude = models.FloatField(
_("Latitude"), validators=[MinValueValidator(-90), MaxValueValidator(90)]
)
longitude = models.FloatField(
_("Longitude"), validators=[MinValueValidator(-180), MaxValueValidator(180)]
)
def get_location(self):
return self.latitude, self.longitude
```
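A minimal sketch of how these abstract mixins are meant to compose into a concrete model (the `Campus` name is invented for illustration):

```python
class Campus(HasSiglaMixin, HasNomeMixin, Timestamps):
    """Concrete model with sigla, nome and created/updated timestamps."""
    # __str__ resolves to HasSiglaMixin's implementation via the MRO.
```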
#### File: base/models/pessoa_juridica.py
```python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from iguana.base.models.user import User
from localflavor.br.models import BRCNPJField
class PessoaJuridica(User):
cnpj = BRCNPJField(unique=True)
@property
def nome_fantasia(self):
return self.first_name
@nome_fantasia.setter
def nome_fantasia(self, value):
self.first_name = value
@property
def razao_social(self):
return self.last_name
@razao_social.setter
def razao_social(self, value):
self.last_name = value
def get_full_name(self):
"""
Return the first_name plus the last_name, with a space in between.
"""
full_name = "%s (%s)" % (self.first_name, self.last_name)
return full_name.strip()
``` |
{
"source": "jorgevilaca82/sagii",
"score": 2
} |
#### File: base/tests/pessoa_tests.py
```python
from django.core.exceptions import ValidationError
from django.test import TestCase
from ..models import *
class PessoaTestCase(TestCase):
fixtures = ('documentopessoatipo.yaml',)
def setUp(self):
self.p1 = Pessoa.objects.create(
nome_razao_social='Instituto Federal de Rondônia - IFRO')
def test_pessoa_p1_exists(self):
p1 = Pessoa.objects.filter(nome_razao_social__icontains='ifro')
self.assertTrue(
p1.exists(), 'não existe nenhuma pessoa com o critério procurado.')
p1 = Pessoa.objects.get(pk=1)
self.assertIsNotNone(p1)
self.assertIsInstance(p1, Pessoa)
self.assertEqual(p1.nome_razao_social,
'Instituto Federal de Rondônia - IFRO')
def test_pessoa_p1_tem_enderecos(self):
self.assertEquals(self.p1.base_endereco_related.count(), 0)
end1 = Endereco(
tipo=Endereco.Tipo.RESIDENCIAL,
cep='76821-001',
logradouro='Av. Tiradentes',
numero='3009',
bairro='Setor Industrial',
cidade='Porto Velho',
uf='RO',
complemento='lado direito',
principal=True,
pessoa=self.p1
)
end1.save()
self.assertEquals(self.p1.base_endereco_related.count(), 1)
self.assertEquals(end1.tipo, Endereco.Tipo.RESIDENCIAL)
self.assertEquals(end1.cep, '76821-001')
self.assertEquals(end1.pessoa.nome_razao_social,
'Instituto Federal de Rondônia - IFRO')
        # automatic save of the second address
end2 = self.p1.base_endereco_related.create(
tipo=Endereco.Tipo.COMERCIAL,
cep='76804-124',
logradouro='Av. 7 de setembro',
numero='2090',
bairro='<NAME>',
cidade='Porto Velho',
uf='RO',
complemento='lado direito'
)
self.assertIsNotNone(end2.pk)
self.assertEquals(end2.pessoa.id, self.p1.id)
self.assertGreater(self.p1.base_endereco_related.count(), 1,
'não existe mais de 1 endereco')
def test_pessoa_p1_tem_contato_social(self):
self.assertEquals(self.p1.base_contatosocial_related.count(), 0)
c1_whatsapp = ContatoSocial(
tipo=ContatoSocial.Tipo.WHATSAPP, valor='+55 69 9.9999-1234', pessoa=self.p1)
c1_whatsapp.save()
self.assertEquals(c1_whatsapp.pk, 1)
self.assertIsNotNone(c1_whatsapp.pessoa)
self.assertEquals(c1_whatsapp.pessoa.pk, self.p1.pk)
self.assertEquals(str(c1_whatsapp), 'Whatsapp: +55 69 9.9999-1234')
def test_pessoa_p1_tem_telefones(self):
self.assertEquals(self.p1.base_telefone_related.count(), 0)
c1_cel = Telefone(tipo=Telefone.Tipo.CEL, numero='69 9.9999-1234', pessoa=self.p1)
        with self.assertRaises(ValidationError) as target:
            c1_cel.full_clean()
        # check that the key for the phone number field is present
        # in the error dict carried by the exception
        self.assertIn('numero', target.exception.message_dict)
        c1_cel.numero = '69 99999-1234'
        # a valid number does not raise an exception
        self.assertIsNone(c1_cel.full_clean())
c1_cel.save()
self.assertIsNotNone(c1_cel.pk)
self.assertEquals(c1_cel.pessoa.pk, self.p1.pk)
def test_pessoa_p1_tem_documento_pessoal(self):
self.assertEquals(self.p1.base_documentopessoal_related.count(), 0)
doc_tipos = DocumentoPessoalTipo.objects.all()
        # check that the basic document types were
        # loaded by the fixture
self.assertGreaterEqual(len(doc_tipos), 6)
for doc_tipo in doc_tipos:
            # Validation of the document value against its type will likely
            # only be done on the web through a form class.
self.p1.base_documentopessoal_related.create(
tipo=doc_tipo,
valor='num/id do ' + doc_tipo.nome,
observacoes='nenhuma observação'
)
self.assertGreaterEqual(self.p1.base_documentopessoal_related.count(), 6)
doc_cnh = self.p1.base_documentopessoal_related.filter(tipo__nome='CNH').get()
self.assertEquals(doc_cnh.valor, 'num/id do CNH')
def test_create_pessoafisica(self):
self.pf1 = PessoaFisica(nome='<NAME>')
self.assertIsInstance(self.pf1, Pessoa)
self.assertIsInstance(self.pf1, PessoaFisica)
self.assertNotIsInstance(self.pf1, PessoaJuridica)
def test_create_pessoajuridica(self):
self.pj1 = PessoaJuridica(razao_social='Instituto Brasileiro')
self.assertIsInstance(self.pj1, Pessoa)
self.assertIsInstance(self.pj1, PessoaJuridica)
self.assertNotIsInstance(self.pj1, PessoaFisica)
```
#### File: sagii/commons/context_processors.py
```python
from django.conf import settings
def app_settings(request):
default_layout = 'layout.html'
if hasattr(settings, 'DEFAULT_LAYOUT'):
default_layout = settings.DEFAULT_LAYOUT
return {
'default_layout': default_layout
}
```
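For the processor above to run on every request it must be listed in the Django settings; a minimal sketch, assuming the module path shown in the file header:

```python
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "sagii.commons.context_processors.app_settings",
            ],
        },
    },
]
```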
#### File: sagii/commons/__init__.py
```python
from enum import Enum, EnumMeta
class ChoiceEnumCharValueMeta(EnumMeta):
def __iter__(self):
return ((tag.value, tag.value) for tag in super().__iter__())
class AutoNameEnum(Enum):
def _generate_next_value_(name, start, count, last_values):
return name.title()
``` |
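These two pieces appear designed to combine: `AutoNameEnum` turns `auto()` members into title-cased strings, and the metaclass makes the enum iterate as `(value, value)` pairs, which is the shape Django expects for field choices. An illustrative sketch (the `Cor` enum is invented):

```python
from enum import auto

class Cor(AutoNameEnum, metaclass=ChoiceEnumCharValueMeta):
    VERDE = auto()
    AZUL = auto()

print(list(Cor))  # [('Verde', 'Verde'), ('Azul', 'Azul')]
```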
{
"source": "jorgevilaca82/SGI",
"score": 2
} |
#### File: sgi/academico/forms.py
```python
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Column, Layout, Row, Submit
from django.forms import ModelForm, modelform_factory
from django.utils.translation import gettext_lazy as _
from . import models as am
class _UnidadeDeEnsinoForm(ModelForm):
class Meta:
model = am.UnidadeDeEnsino
fields = (
"tipo",
"sigla",
"nome",
"pessoa_juridica",
# 'unidade_superior',
)
labels = {
"tipo": _("Tipo"),
"sigla": _("Sigla"),
"nome": _("Nome"),
"pessoa_juridica": _("PJ"),
# 'unidade_superior': _('Unid. Superior'),
}
extra_required = {
"pessoa_juridica": True,
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# set extra required fields
for field, required in self.Meta.extra_required.items():
self.fields[field].required = required
self.helper = FormHelper()
self.helper.layout = Layout(
Row(
Column("tipo", css_class="form-group col-md-4 mb-0"),
Column("sigla", css_class="form-group col-md-8 mb-0"),
),
Row(Column("nome", css_class="form-group col-md-12 mb-0"),),
Row(
Column("pessoa_juridica", css_class="form-group col-md-3 mb-0"),
Column("uo_superior", css_class="form-group col-md-6 mb-0"),
),
Submit("submit", "Salvar"),
)
UnidadeDeEnsinoForm = modelform_factory(am.UnidadeDeEnsino, form=_UnidadeDeEnsinoForm)
```
#### File: academico/services/registro_aluno.py
```python
import datetime
from injector import inject
class GeradorRA(object):
def gerar(self):
raise NotImplementedError()
class GeradorRAImpl(GeradorRA):
model = None
def gerar(self):
y = datetime.date.today().year
return f"{y}{self.model.pk}"
class AlunoRAUpdater(object):
@inject
def __init__(self, gerador_ra: GeradorRA, aluno):
self.gerador_ra = gerador_ra
self.aluno = aluno
self.gerador_ra.model = aluno
def update(self) -> None:
self.aluno.ra = self.gerador_ra.gerar()
self.aluno.save(update_fields=["ra"])
```
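A hedged wiring example for the updater; the `aluno` instance and its `ra` field come from the repo's models, and the explicit construction sidesteps the injector container:

```python
updater = AlunoRAUpdater(gerador_ra=GeradorRAImpl(), aluno=aluno)
updater.update()  # sets aluno.ra to f"{current_year}{aluno.pk}" and saves it
```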
#### File: pessoa/models/pessoa.py
```python
from django.db import models
from sgi.commons.models import AuditableModel
class Pessoa(AuditableModel):
class Meta:
pass
nome_razao_social = models.CharField(max_length=255)
def __str__(self):
return self.nome_razao_social
class PessoaRelatedModel(AuditableModel):
class Meta:
abstract = True
pessoa = models.ForeignKey(
Pessoa,
on_delete=models.CASCADE,
related_name="%(app_label)s_%(class)s_related",
related_query_name="%(app_label)s_%(class)ss",
)
```
#### File: pessoa/models/telefone.py
```python
from enum import IntEnum, auto
from django.db import models
from django.utils.translation import gettext_lazy as _
from sgi.commons.validators import PhoneRegexValidator
from .pessoa import PessoaRelatedModel
class Telefone(PessoaRelatedModel):
class Meta:
unique_together = ("numero", "pessoa")
class Tipo(IntEnum):
FIXO = auto()
CEL = auto()
TELEFONE_TIPO_CHOICES = (
(Tipo.FIXO.value, _("Tel. Fixo")),
(Tipo.CEL.value, _("Tel. Celular")),
)
tipo = models.IntegerField(choices=TELEFONE_TIPO_CHOICES)
numero = models.CharField(max_length=120, validators=[PhoneRegexValidator()])
observacoes = models.TextField(null=True, blank=True)
def __str__(self):
return self.numero
def get_absolute_url(self):
from django.urls import reverse
# pylint: disable=no-member
kwargs = {"pessoa_id": self.pessoa_id, "pk": self.pk}
return reverse("sgi_base:pessoa-telefone-detail", kwargs=kwargs)
```
#### File: pessoa/views/documento.py
```python
from sgi.base import models as bm
from sgi.base.pessoa import forms
from . import generic
MODEL = bm.DocumentoPessoal
FORM_CLASS = forms.DocumentoForm
class ListView(generic.ListView):
model = MODEL
class CreateView(generic.CreateView):
model = MODEL
form_class = FORM_CLASS
# pylint: disable=no-member
success_message = (
model._meta.verbose_name + " com n. %(valor)s cadastrado com sucesso!"
)
documentos_disabled = []
def get_form_kwargs(self):
"""Return the keyword arguments for instantiating the form."""
kwargs = super().get_form_kwargs()
kwargs.update({"documentos_disabled": self.documentos_disabled})
return kwargs
def get(self, request, *args, **kwargs):
        # Disable the document types already used by this person
tipos = self.pessoa.base_documentopessoal_related.values("tipo")
self.documentos_disabled = [documento["tipo"] for documento in tipos]
return super().get(request, *args, **kwargs)
class DetailView(generic.DetailView):
model = MODEL
class UpdateView(generic.UpdateView):
model = MODEL
form_class = FORM_CLASS
# pylint: disable=no-member
success_message = (
model._meta.verbose_name + " com n. %(valor)s atualizada com sucesso!"
)
class DeleteView(generic.DeleteView):
model = MODEL
# pylint: disable=no-member
success_message = (
model._meta.verbose_name + " com n. %(valor)s excluída permanentemente!"
)
success_url_name = "sgi_base:pessoa-documento-list"
```
#### File: sgi/home/checks.py
```python
from django.apps import apps
from django.core.checks import Error, register
@register()
def sgi_commons_installed_check(app_configs, **kwargs):
errors = []
if apps.is_installed("sgi_commons"):
errors.append(
Error(
"SGI Commons não está instalado",
hint="Adicione ao settings.INSTALLED_APPS o app sgi.commons.",
obj=None,
id="sgi_home.E001",
)
)
return errors
``` |
{
"source": "jorgeviz/compile",
"score": 3
} |
#### File: jorgeviz/compile/modules.py
```python
import torch
import torch.nn.functional as F
from torch import nn
import utils
class CompILE(nn.Module):
"""CompILE example implementation.
Args:
input_dim: Dictionary size of embeddings.
hidden_dim: Number of hidden units.
latent_dim: Dimensionality of latent variables (z).
max_num_segments: Maximum number of segments to predict.
temp_b: Gumbel softmax temperature for boundary variables (b).
temp_z: Temperature for latents (z), only if latent_dist='concrete'.
latent_dist: Whether to use Gaussian latents ('gaussian') or concrete /
Gumbel softmax latents ('concrete').
"""
def __init__(self, input_dim, hidden_dim, latent_dim, max_num_segments,
temp_b=1., temp_z=1., latent_dist='gaussian'):
super(CompILE, self).__init__()
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.latent_dim = latent_dim
self.max_num_segments = max_num_segments
self.temp_b = temp_b
self.temp_z = temp_z
self.latent_dist = latent_dist
self.embed = nn.Embedding(input_dim, hidden_dim)
self.lstm_cell = nn.LSTMCell(hidden_dim, hidden_dim)
# LSTM output heads.
self.head_z_1 = nn.Linear(hidden_dim, hidden_dim) # Latents (z).
if latent_dist == 'gaussian':
self.head_z_2 = nn.Linear(hidden_dim, latent_dim * 2)
elif latent_dist == 'concrete':
self.head_z_2 = nn.Linear(hidden_dim, latent_dim)
else:
raise ValueError('Invalid argument for `latent_dist`.')
self.head_b_1 = nn.Linear(hidden_dim, hidden_dim) # Boundaries (b).
self.head_b_2 = nn.Linear(hidden_dim, 1)
# Decoder MLP.
self.decode_1 = nn.Linear(latent_dim, hidden_dim)
self.decode_2 = nn.Linear(hidden_dim, input_dim)
def masked_encode(self, inputs, mask):
"""Run masked RNN encoder on input sequence."""
hidden = utils.get_lstm_initial_state(
inputs.size(0), self.hidden_dim, device=inputs.device)
outputs = []
for step in range(inputs.size(1)):
hidden = self.lstm_cell(inputs[:, step], hidden)
hidden = (mask[:, step, None] * hidden[0],
mask[:, step, None] * hidden[1]) # Apply mask.
outputs.append(hidden[0])
return torch.stack(outputs, dim=1)
def get_boundaries(self, encodings, segment_id, lengths):
"""Get boundaries (b) for a single segment in batch."""
if segment_id == self.max_num_segments - 1:
# Last boundary is always placed on last sequence element.
logits_b = None
sample_b = torch.zeros_like(encodings[:, :, 0]).scatter_(
1, lengths.unsqueeze(1) - 1, 1)
else:
hidden = F.relu(self.head_b_1(encodings))
logits_b = self.head_b_2(hidden).squeeze(-1)
# Mask out first position with large neg. value.
neg_inf = torch.ones(
encodings.size(0), 1, device=encodings.device) * utils.NEG_INF
# TODO(tkipf): Mask out padded positions with large neg. value.
logits_b = torch.cat([neg_inf, logits_b[:, 1:]], dim=1)
if self.training:
sample_b = utils.gumbel_softmax_sample(
logits_b, temp=self.temp_b)
else:
sample_b_idx = torch.argmax(logits_b, dim=1)
sample_b = utils.to_one_hot(sample_b_idx, logits_b.size(1))
return logits_b, sample_b
def get_latents(self, encodings, probs_b):
"""Read out latents (z) form input encodings for a single segment."""
readout_mask = probs_b[:, 1:, None] # Offset readout by 1 to left.
readout = (encodings[:, :-1] * readout_mask).sum(1)
hidden = F.relu(self.head_z_1(readout))
logits_z = self.head_z_2(hidden)
# Gaussian latents.
if self.latent_dist == 'gaussian':
if self.training:
mu, log_var = torch.split(logits_z, self.latent_dim, dim=1)
sample_z = utils.gaussian_sample(mu, log_var)
else:
sample_z = logits_z[:, :self.latent_dim]
# Concrete / Gumbel softmax latents.
elif self.latent_dist == 'concrete':
if self.training:
sample_z = utils.gumbel_softmax_sample(
logits_z, temp=self.temp_z)
else:
sample_z_idx = torch.argmax(logits_z, dim=1)
sample_z = utils.to_one_hot(sample_z_idx, logits_z.size(1))
else:
raise ValueError('Invalid argument for `latent_dist`.')
return logits_z, sample_z
def decode(self, sample_z, length):
"""Decode single time step from latents and repeat over full seq."""
hidden = F.relu(self.decode_1(sample_z))
pred = self.decode_2(hidden)
return pred.unsqueeze(1).repeat(1, length, 1)
def get_next_masks(self, all_b_samples):
"""Get RNN hidden state masks for next segment."""
if len(all_b_samples) < self.max_num_segments:
# Product over cumsums (via log->sum->exp).
log_cumsums = list(
map(lambda x: utils.log_cumsum(x, dim=1), all_b_samples))
mask = torch.exp(sum(log_cumsums))
return mask
else:
return None
def forward(self, inputs, lengths):
# Embed inputs.
embeddings = self.embed(inputs)
# Create initial mask.
mask = torch.ones(
inputs.size(0), inputs.size(1), device=inputs.device)
all_b = {'logits': [], 'samples': []}
all_z = {'logits': [], 'samples': []}
all_encs = []
all_recs = []
all_masks = []
for seg_id in range(self.max_num_segments):
# Get masked LSTM encodings of inputs.
encodings = self.masked_encode(embeddings, mask)
all_encs.append(encodings)
# Get boundaries (b) for current segment.
logits_b, sample_b = self.get_boundaries(
encodings, seg_id, lengths)
all_b['logits'].append(logits_b)
all_b['samples'].append(sample_b)
# Get latents (z) for current segment.
logits_z, sample_z = self.get_latents(
encodings, sample_b)
all_z['logits'].append(logits_z)
all_z['samples'].append(sample_z)
# Get masks for next segment.
mask = self.get_next_masks(all_b['samples'])
all_masks.append(mask)
# Decode current segment from latents (z).
reconstructions = self.decode(sample_z, length=inputs.size(1))
all_recs.append(reconstructions)
return all_encs, all_recs, all_masks, all_b, all_z
``` |
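An illustrative smoke test of the model above; the hyperparameters are arbitrary and the repo's `utils` module must be importable:

```python
import torch

model = CompILE(input_dim=10, hidden_dim=64, latent_dim=16,
                max_num_segments=3, latent_dist='gaussian')
inputs = torch.randint(0, 10, (2, 12))   # batch of 2 token sequences of length 12
lengths = torch.tensor([12, 12])
all_encs, all_recs, all_masks, all_b, all_z = model(inputs, lengths)
print(len(all_recs), all_recs[0].shape)  # 3 segments, each reconstruction (2, 12, 10)
```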
{
"source": "jorgeviz/depcdmx",
"score": 3
} |
#### File: depcdmx/scrapers/lamudi.py
```python
import requests
import statistics
import pandas as pd
from bs4 import BeautifulSoup
import datetime as dt
import random
import time
import os
# Vars
_root = "https://www.lamudi.com.mx/"
_state = 'nuevo-leon'
_operation = 'venta'
_base_url = _root + _state + "/for-sale/?page={}"
ddir = 'data/'
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
'referrer': 'https://google.com',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'en-US,en;q=0.9',
'Pragma': 'no-cache',
}
def save(depts):
""" Append page data
Params:
-----
depts : pd.Dataframe()
Dataframe of Departments
"""
# Read Existant file to append
_fname = ddir + "{}/lamudi" + "-" + _state + "-" + _operation + ".csv"
_fname = _fname.format(dt.date.today().isoformat())
try:
df = pd.read_csv(_fname, delimiter=',')
except:
print('New file, creating folder..')
try:
os.mkdir(ddir + '{}'.format(dt.date.today().isoformat()))
print('Created folder!')
except:
print('Folder exists already!')
df = pd.DataFrame()
# Append data
print(depts.head(1).to_dict())
try:
if df.empty:
depts.set_index(['name', 'location']).to_csv(_fname, sep=',')
print('Correctly saved file: {}'.format(_fname))
else:
df = pd.concat([df, depts])
df.set_index(['name', 'location']).to_csv(_fname, sep=',')
print('Correctly saved file: {}'.format(_fname))
except Exception as e:
print(e)
print('Could not save file: {}'.format(_fname))
def scrape(content):
""" Scrape all listings per page """
columns = ['name',
'description',
'location',
'link',
'price',
'operation',
'rooms',
'bathrooms',
'construction (m2)',
'terrain (m2)']
data = pd.DataFrame(columns=columns)
# Generate soup
soup = BeautifulSoup(content, 'html.parser')
# Get Characteristics
for d in soup.find_all(class_="ListingCell-AllInfo"):
temp_dict = {}
try:
temp_dict['name'] = d.find(class_="ListingCell-KeyInfo-title").text.strip()
temp_dict['description'] = d.find(class_="ListingCell-shortDescription").text.strip()
temp_dict['location'] = ' '.join([j.strip() for j in d.find(class_="ListingCell-KeyInfo-address").text.strip().split('\n')])
temp_dict['link'] = d.find(class_="ListingCell-KeyInfo-title").find('a').get('href')
temp_dict['price'] = d.find(class_="PriceSection-FirstPrice").text.strip()
temp_dict['operation'] = _operation
for att in d.find(class_="KeyInformation").find_all(class_="KeyInformation-attribute"):
att = att.text.lower()
if 'recámara' in att:
temp_dict['rooms'] = statistics.mean([int(s) for s in att.split() if s.isdigit()])
elif 'baño' in att:
temp_dict['bathrooms'] = statistics.mean([int(s) for s in att.split() if s.isdigit()])
elif 'terreno' in att:
temp_dict['terrain (m2)'] = statistics.mean([int(s) for s in att.split() if s.isdigit()])
elif 'superficie' in att:
temp_dict['construction (m2)'] = statistics.mean([int(s) for s in att.split() if s.isdigit()])
except Exception as e:
print(e)
continue
data = data.append(temp_dict, ignore_index=True)
print('Found {} depts'.format(len(data['name'])))
return data
def paginate():
""" Loop over pages to retrieve all info available
Returns:
-----
pg_nums : int
Number of pages scraped
"""
pg_nums = 1
while True:
try:
print(_base_url.format(pg_nums))
r = requests.get(_base_url.format(pg_nums),
headers=headers)
# Anti blocking delay
time.sleep(random.randint(5, 10))
if r.status_code != 200:
raise Exception("Wrong Response")
depts = scrape(r.content)
if depts.empty:
raise Exception("No more departments")
except Exception as e:
print(e)
print('Finishing to retrieve info.')
break
# Store values
save(depts)
pg_nums += 1
return pg_nums
def main():
""" Main method """
print('Starting to scrape Lamudi')
paginate()
return "Done"
if __name__ == '__main__':
main()
``` |
{
"source": "jorgeviz/otto",
"score": 3
} |
#### File: otto/core/bitso_.py
```python
from __future__ import print_function
import bitso
import requests
from pprint import pformat as pf
import datetime
import csv
import time
import sys
# from ..helpers import ottoHelpers # Need to update to Python3
class Bitso(object):
""" Class to perform Bitso trades over crypto-currencies
and store prices in a local DB for future analysis.
Bitso Class attrs:
- api : (bitso.Api) Bitso API object with authentification
- acc_status : (bitso.AccountStatus) Bitso Account Status object
- books : (list) Dictionaries with trading limits of each Currency-Pair
- books_avail : (list) Keys of all available Currency-Pairs in Bitso
- fees : (dict) Trading Fee in percentage indexed by Currency-Pair key
- balances : (dict) Total and Available balances indexed by Currency key
- currencies : (list) Available currencies
"""
def __init__(self, api_key=None, secret=None):
""" Constructor
Params:
-----
- api_key : (str) API KEY provided by Bitso
- secret : (str) API SECRET provided by Bitso
"""
if api_key is not None and secret is not None:
self.api = bitso.Api(api_key, secret)
else:
self.api = bitso.Api()
# Show Books
self.get_books() # Set True to show limits
# Show Fees
self.get_fees()
# Show Account limits
self.get_limits()
# Get Balances
self.get_balances()
#def get_auth(self):
# import time
# import hmac
# import hashlib
# import requests
# bitso_key = "BITSO_KEY"
# bitso_secret = "BITSO_SECRET"
# nonce = str(int(round(time.time() * 1000)))
# http_method = "GET"
# request_path = "/v3/balance/"
# json_payload = ""
# # Create signature
# message = nonce+http_method+request_path+json_payload
# signature = hmac.new(bitso_secret.encode('utf-8'),
# message.encode('utf-8'),
# hashlib.sha256).hexdigest()
# # Build the auth header
# auth_header = 'Bitso %s:%s:%s' % (bitso_key, nonce, signature)
# # Send request
# response = requests.get("https://api.bitso.com/v3/balance/", headers={"Authorization": auth_header})
# print response.content
def get_limits(self):
""" Method to retrieve and show account status
"""
self.acc_status = self.api.account_status()
# wait for a sec.
time.sleep(1)
print("Daily Limit: $", self.acc_status.daily_limit)
print("Daily Remaining: $", self.acc_status.daily_remaining, '\n')
def get_fees(self):
""" Method to retrieve and show fees
"""
_fees = self.api.fees()
# wait for a sec.
time.sleep(1)
# Obtain dict of fees
self.fees = {_f: float(_fees.__dict__[_f].fee_percent) \
for _f in _fees.__dict__.keys()\
if _f in self.books_avail}
print('Fees (%):', pf(self.fees), '\n')
def get_balances(self):
""" Method to retrieve and show account balances
"""
_balances = self.api.balances()
# wait for a sec.
time.sleep(1)
self.currencies = _balances.currencies
self.balances = {_b: {
'available': float(_bv.__dict__['available']),
'total': float(_bv.__dict__['total'])
} \
for _b, _bv in _balances.__dict__.items() \
if _b != 'currencies'}
print('Currencies: ', pf(self.currencies), '\n')
print('Balances: ', pf(self.balances), '\n')
def get_books(self, _show=False):
""" Method to show available books in bitso
Params:
-----
- _show : (bool) Show minimum and maximum order values in Bitso
"""
try:
# Books consultation
_av_books = requests.get("https://api.bitso.com/v3/available_books/")
# wait for a sec.
time.sleep(1)
except requests.exceptions.RequestException as _rexc:
print(_rexc)
return None
# Success verification
if _av_books.json()['success']:
self.books = _av_books.json()['payload']
else:
print('Request has not been successful!')
return None
# Results' display
if _show:
print(pf(self.books))
self.books_avail = [_x['book'] for _x in self.books]
print('Available books:', pf(self.books_avail), '\n')
def price(self, _book):
""" Method to verify Value of defined Pair of currencies
Params:
-----
- _book : (str) Book or Pair of currencies to verify
Returns:
-----
- (dict) Pair exchange values
>>> {
"book": "btc_mxn",
"volume": "22.31349615",
"high": "5750.00",
"last": "5633.98",
"low": "5450.00",
"vwap": "5393.45",
"ask": "5632.24",
"bid": "5520.01",
"created_at": "2016-04-08T17:52:31.000+00:00"
}
"""
try:
# Retrieve Book
_p = requests.get('https://api.bitso.com/v3/ticker/?book={}'.format(_book)).json()
# wait for a sec.
time.sleep(1.5)
except Exception as e:
print(e)
return None
# Success verification
if not _p['success']:
print('Request has not been successful!')
return None
# Save for later analysis
if not self.save_csv(_p['payload'], _p['payload']['book']):
print('Could not save data into file')
return _p['payload']
def all_prices(self, valid='all'):
""" Method to retrieve all prices from valid currencies
Params:
-----
valid: (str | list) 'all' if wants to perform over each currency, otherwise send list of Currency-Pairs
"""
# Validate currencies
if valid == 'all':
_pairs = self.books_avail
else:
_pairs = [_v for _v in valid if _v in self.books_avail]
curr_prices = {}
# Loop over each currency to retrieve price
for _c in _pairs:
max_tries = 3
for _try in range(max_tries):
try:
curr_prices[_c] = float(self.price(_c)['last'])
break
except TypeError:
# In case of type error
print('Could not fetch price, retrying...')
time.sleep(2)
if _try == (max_tries-1):
print('Exceeded trials, shutting down!')
sys.exit()
# Wait for 1 sec. to avoid being blocked
time.sleep(0.5)
print('Current Currency-Pair prices: \n', pf(curr_prices), '\n')
return curr_prices
def save_csv(self, _dict, f_name):
""" Method to convert JSON exchange values and save it into CSV dumps
Params:
- _dict: (dict) Data Values
- f_name: (str) File Name
Returns:
- (bool) Saving Status
"""
try:
# Verify if file existed
f = open('data/{}.csv'.format(f_name), 'r')
print('File existed, appending...')
f.close()
except IOError:
# If new file, write headers
f = open('data/{}.csv'.format(f_name), 'w')
print('Creating file with headers')
writer = csv.DictWriter(f, fieldnames=list(_dict.keys()))
writer.writeheader()
print('File created, appending...')
f.close()
try:
# Append data value into File
f = open('data/{}.csv'.format(f_name), 'a')
writer = csv.DictWriter(f, fieldnames=list(_dict.keys()))
writer.writerow(_dict)
print('Saved {} data!'.format(f_name))
except Exception as e:
print(e)
return False
return True
class BitsoTrade(Bitso):
""" Class to perform trades over Bitso exchange, which inheritates
all methods from Bitso class.
BitsoTrade attrs:
- trade_prices: (dict) Dictionary of last prices indexed by Currency-pairs
- base_lines: (dict) Dictionary of base_line indexed by Currency_pairs
"""
def __init__(self, api_key, secret):
""" Constructor
"""
# Initialize Bitso Parent Class
super(BitsoTrade, self).__init__(api_key, secret)
self.trade_prices = {}
self.base_lines = {}
def in_bounds(self, amount, _pair):
""" Method to check if transaction is within trading bounds in Bitso
For Book Limits:
- minimum_amount: Minimum amount of major when placing an order.
- maximum_amount: Maximum amount of major when placing an order.
Params:
-----
- amount : (float) Amount of Major currency to Trade
- _pair: (str) Currency-Pair key
Returns:
-----
- (bool) : Valid or not.
"""
# Verify if is valid currency-pair
if _pair not in self.books_avail:
print('{} is not a valid Currency-Pair'.format(_pair))
return False
# Fetch book limit info
_tbook = [_b for _b in self.books if _pair == _b['book']][0]
# Compare if above minimum amount
if float(amount) < float(_tbook['minimum_amount']):
print('{} is too small to perform transaction'.format(amount))
return False
# Compare if below maximum amount
if float(amount) > float(_tbook['maximum_amount']):
print('{} is too big to perform transaction'.format(amount))
return False
return True
def fetch_currency(self, _pair, _side):
""" Method to return the correct currency definition to verify in limits
Params:
-----
- _pair: (str) Currency-Pair to Trade (Major_minor)
- _side: (str, 'buy' | 'sell') Trading Position
Returns:
-----
- (dict) Corresponding currency to buy and sell
"""
if _side == 'buy':
return {
"buying": _pair.split('_')[0],
"selling": _pair.split('_')[1]
}
else:
return {
"buying": _pair.split('_')[1],
"selling": _pair.split('_')[0]
}
def enough_balance(self, amount, _pair, _selling):
""" Method to verify there is enough balance in the selling currency
to proceed with the transaction.
Params:
-----
- amount: (str) Major amount to Trade
- _pair: (str) Currency-pair
- _selling: (float) Selling currency
Returns:
-----
- (float) Current balance of the selling currency
- (NoneType) In case there is not enough money to execute transaction
"""
# Update Balances
self.get_balances()
# If selling major compute balance directly
if _selling == _pair.split('_')[0]:
# If not enough balance
print('Selling, so checking Major to verify balance')
if amount > self.balances[_selling]['available']:
print('Balance {} in {} is not enough to perform transaction for {}'
.format(self.balances[_selling]['available'],
_selling,
amount))
return None
print('Balance {} in {} enough to sell {}'
.format(self.balances[_selling]['available'],
_selling,
amount))
return self.balances[_selling]['available']
# If selling minor, get last price of exchange between currencies
exc_price = self.price(_pair)
tmp_balance = self.balances[_selling]['available']
# Converting minor into Major currency equivalence to validate correct balance
print('Buying, so converting minor into Major to verify balance')
if (amount * float(exc_price['last'])) > tmp_balance:
print('{} is not enough balance in {} to perform transaction for {}'
.format(tmp_balance,
_selling,
amount * float(exc_price['last'])))
return None
print('Balance {} in {} enough to sell {}'
.format(tmp_balance,
_selling,
amount * float(exc_price['last'])))
return tmp_balance
def verify_trade(self, _pair, _side, _major_amount):
""" Method to verify following transaction has enough balance and is big enough.
"""
# Fetch currency definition depending on trading position
_curr = self.fetch_currency(_pair, _side)
# Check buying currency amount is in book limits
if not self.in_bounds(_major_amount, _pair):
return False
# Check if there is enough balance in the selling currency
_bal = self.enough_balance(_major_amount, _pair, _curr['selling'])
if not _bal:
return False
print("""Transaction pre-verified!!
---
Buying {} of {}.
Current {} selling balance is {}.
---
""".format(_major_amount,
_curr['buying'],
_curr['selling'],
_bal
))
return True
def set_market_order(self, _pair, _side, _major_amount, only_check=True):
""" Method to place a Market order into Bitso for given Currency-Pair
and a defined Major amount.
Trading positions:
- Buy : converts minor into Major currency
- Sell : converts Major into minor currency
Params:
-----
- _pair: (str) Currency-Pair to Trade (Major_minor)
- _side: (str, 'buy' | 'sell') Trading Position
- _major_amount: (float) Major amount to trade
- only_check: (bool) Set False if want to execute order,
otherwise just verifies if is a valid transaction
Returns:
-----
- (bool) Transaction Status
"""
print("""Executing transaction:
---
Currency-Pair: {}
Position: {}
Major Amount: {}
---
""".format(_pair, _side, _major_amount))
# Verify trade
if not self.verify_trade(_pair, _side, _major_amount):
print('Transaction cannot be executed')
return False
print('Performing Transaction.....')
# Execute transaction
if not only_check:
try:
_transac = self.api.place_order(book=_pair,
side=_side,
order_type='market',
major=str(_major_amount))
# wait for some seconds...
time.sleep(3)
_transac.update({
'book': _pair,
'side': _side,
'order_type': 'market',
'major': str(_major_amount),
'created_at': str(datetime.datetime.utcnow())
})
print('Transaction correctly executed!')
# Save Transaction into file
if not self.save_csv(_transac, 'transactions'):
print('Could not save transactional data.')
except Exception as e:
print('Could not execute transaction:', e)
return False
return True
def update_series(self, valid):
""" Method to cache prices from valid currencies
Params:
-----
valid: (str | list) 'all' if wants to perform over each currency, otherwise send list of Currency-Pairs
"""
if valid != 'all' and not isinstance(valid, list):
print('Valid Pairs param has incorrect format!')
sys.exit()
self.trade_prices = self.all_prices(valid)
print('Trade prices successfully updated!')
def set_baseline(self, pair):
""" Method to set baseline to certain currency-pair using last known price
Params:
-----
- pair: (str) Currency-Pair
"""
if pair not in self.books_avail:
print('{} Pair is not supported!'.format(pair))
sys.exit()
self.base_lines.update({
pair: self.trade_prices[pair]
})
print('{} baseline successfully updated!'.format(pair))
def config_valid(self, config):
""" Method to verify if config object is valid
Params:
-----
- config: (dict) JSON file with all needed trade rules
Returns:
-----
- (bool) Config Validation
"""
if 'valid_pairs' not in config:
print('Valid Pairs param is missing!')
sys.exit()
if 'rules' not in config:
print('Valid Pairs param is missing!')
sys.exit()
if (set(config['rules'].keys()) != set(config['valid_pairs'])) or \
(len(config['rules'].keys()) != len(config['valid_pairs'])):
print('Valid Pairs and Rules must match!')
sys.exit()
return True
def evaluate_rule(self, pair, rule):
""" Method to evaluate which action must be executed after defined rules.
* If trading_price > (base_line + rule_selling_bound) Then: sell
* Else if trading_price < (base_line + rule_buying_bound) Then: buy
* Else: None
Params:
-----
- pair: (str) Currency Pair
- rule: (dict) Rule with selling and buying bounds
Returns:
-----
- (str | NoneType) Trading Position ('buy' | 'sell' | None)
"""
# Boundaries
upper_bound = self.base_lines[pair] + (self.base_lines[pair] * rule['selling_major_bound'])
lower_bound = self.base_lines[pair] + (self.base_lines[pair] * rule['buying_major_bound'])
# Selling evaluation
if self.trade_prices[pair] > upper_bound:
print('Selling: {} is MORE EXPENSIVE than {}'.format(self.trade_prices[pair], self.base_lines[pair]))
return 'sell'
elif self.trade_prices[pair] < lower_bound:
print('Buying: {} is CHEAPER than {}'.format(self.trade_prices[pair], self.base_lines[pair]))
return 'buy'
else:
print('Nothing: {} is almost the same than {}'.format(self.trade_prices[pair], self.base_lines[pair]))
print('Decision: {} > {} < {}'.format(lower_bound, self.trade_prices[pair], upper_bound))
return None
def get_acumulate(self):
""" Method to show Acumulated Balance and store results
"""
# Update Balances
self.get_balances()
# Total Balances dictionary
b_dict = {_k+'_total': _v['total'] for _k,_v in self.balances.items()}
# MXN equivalences
mxn_equivs = []
for _k, _v in self.balances.items():
if _k == 'mxn':
# Append MXN Peso
mxn_equivs.append(_v['total'])
continue
for _ba in self.books_avail:
if _k+'_mxn' == _ba:
# Append Direct MXN convertion
mxn_equivs.append(_v['total'] * self.trade_prices[_ba])
break
if _k+'_btc' == _ba:
# Append BTC-MXN convertion
mxn_equivs.append(_v['total']
* self.trade_prices[_ba]
* self.trade_prices['btc_mxn'])
break
# Update acumulate in dict
b_dict['acumulated_mxn'] = sum(mxn_equivs)
b_dict['created_at'] = str(datetime.datetime.utcnow())
print(""" Acumulated Balances:
----
----
{}
----
----
""".format(pf(b_dict)))
# Write balance in file
self.save_csv(b_dict, 'balances')
def automate(self, config):
""" Method to apply orders within defined rules in the config file.
Params:
- config: (dict) JSON file with all needed trade rules
>>> {
'valid_pairs': ['btc_mxn', 'eth_mxn'],
'rules': {
'btc_mxn':{
'selling_major_bound': 3, # In %
'buying_major_bound': -1.8, # In %
'major_amount' : 0.00003
},
'eth_mxn':{
'selling_major_bound': 2, # In %
'buying_major_bound': -1.8, # In %
'major_amount' : 0.003
}
}
}
"""
# Validate Config file
self.config_valid(config)
# Initialize
self.update_series(config['valid_pairs'])
# For each currency-pair
for vp in config['valid_pairs']:
# Set Baseline
self.set_baseline(vp)
try:
while True:
# Performance Delay
time.sleep(2) # Set to 2 secs.
# Update Price Series
self.update_series(config['valid_pairs'])
print()
# For each currency-pair
for vp in config['valid_pairs']:
# Evaluate action
print('Evaluating {}...'.format(vp))
_action = self.evaluate_rule(vp, config['rules'][vp])
if not _action:
# Passing action
print('Not action required!')
continue
print("################################################")
print("################################################")
print('Trying to perform {} in {}'.format(_action, vp))
# If exceeds limits, perform order
self.set_market_order(vp,
_action,
config['rules'][vp]['major_amount'],
                                          only_check=False)  # only_check=False places real orders; set True to dry-run
print("################################################")
print("################################################")
# Reset Baseline
self.set_baseline(vp)
# Get Acum Balances
self.get_acumulate()
print()
except KeyboardInterrupt:
        print('\n Stopping trading!!!')
import sys
sys.exit()
```
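A quick worked check of the boundary arithmetic in `evaluate_rule` above: the rule values multiply the baseline directly, so a 3% band must be written as `0.03`, not `3` (the sample config in `automate`'s docstring labels its values "In %", but nothing in the code divides them by 100). A minimal sketch with illustrative numbers:
```python
# Illustrative check of the evaluate_rule boundary arithmetic (hypothetical values).
base_line = 100000.0          # last known price used as the baseline
selling_major_bound = 0.03    # +3% written as a fraction
buying_major_bound = -0.018   # -1.8% written as a fraction

upper_bound = base_line + base_line * selling_major_bound
lower_bound = base_line + base_line * buying_major_bound

print(upper_bound, lower_bound)  # 103000.0 98200.0 -> sell above, buy below
```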
#### File: otto/core/flash.py
```python
from flask import flash
def _escape(message):
"""Escape some characters in a message. Make them HTML friendly.
Params:
-----
- message : (str) Text to process.
Returns:
- (str) Escaped string.
"""
    translations = {
        '"': '&quot;',
        "'": '&#39;',
        '`': '&lsquo;',
        '\n': '<br>',
    }
for k, v in translations.items():
message = message.replace(k, v)
return message
def default(message):
return flash(_escape(message), 'default')
def success(message):
return flash(_escape(message), 'success')
def info(message):
return flash(_escape(message), 'info')
def warning(message):
return flash(_escape(message), 'warning')
def danger(message):
return flash(_escape(message), 'danger')
def well(message):
return flash(_escape(message), 'well')
def modal(message):
return flash(_escape(message), 'modal')
```
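A minimal usage sketch of these flash helpers inside a hypothetical Flask view (the route and template names are assumptions for illustration; the template must render `get_flashed_messages(with_categories=True)` for the category argument to matter):
```python
# Hypothetical view using the flash helpers above.
from flask import Flask, render_template
from otto.core import flash as otto_flash

app = Flask(__name__)

@app.route('/trade')
def trade():
    otto_flash.success('Order placed!')
    otto_flash.warning('Price moved\nwhile ordering.')  # the newline is escaped to <br>
    return render_template('trade.html')  # assumed template
```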
#### File: otto/otto/helpers.py
```python
import logging
import sys
class ottoHelpers(object):
""" Class to manage all needed dependecy objects for otto
"""
def __init__(self):
""" Constructor
"""
self.start_logger()
def start_logger(self):
""" Method to start Otto logger
"""
        # The logger currently only works on Python 3; this will be updated
        self.logger = logging.getLogger('Otto-CT-v0.0.1.beta')  # TODO: change logger name
        self.logger.info('Otto Logger has been activated.')
``` |
{
"source": "jorgeviz/pygres",
"score": 2
} |
#### File: pygres/pygres/errors.py
```python
class PygresError(Exception):
"""Exception raised for errors in the input.
"""
def __init__(self, expression, message):
self.expression = expression
self.message = message
```
#### File: pygres/pygres/pygres.py
```python
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from .errors import PygresError
from .model import Model
class Pygres(object):
conn = None
curs = None
model = Model
config = None
q = None
def __init__(self, config, **kwargs):
self.config = config
# Kwargs
self.autocommit = kwargs.get('autocommit', False)
#global conn
#global cur
if not self.config:
            raise PygresError("Configuration variables missing", 'Missing vars in config')
# Connection
try:
self.conn = psycopg2.connect(
database=self.config['SQL_DB'],
user=self.config['SQL_USER'],
                password=self.config['SQL_PASSWORD'],
host=self.config['SQL_HOST'],
port=self.config['SQL_PORT']
)
# Isolation level, connection with autocommit
if self.autocommit:
self.conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
# Cursor
self.cur = self.conn.cursor()
        except psycopg2.Error:
            raise PygresError("Couldn't connect to Postgres", 'Missing')
def close(self):
self.conn.close()
def model(self, table, pk, *initial_data,**kwargs):
return Model(self, table, pk, *initial_data,**kwargs)
def query(self,statement,values=[],commit=True):
self.cur.execute(statement, values)
self.q = self.cur.query
if commit:
self.conn.commit()
return self
def commit(self):
self.conn.commit()
return self
def rollback(self):
self.conn.rollback()
return self
def fetch(self):
columns = [desc[0] for desc in self.cur.description]
rows = self.cur.fetchall()
rows_list = []
for row in rows:
row_dict = {}
for i,col in enumerate(columns):
row_dict[col] = row[i]
rows_list.append(row_dict)
return rows_list
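# Example usage (illustrative; assumes a config dict with the SQL_* keys above):
#   db = Pygres({'SQL_DB': 'mydb', 'SQL_USER': 'user', 'SQL_PASSWORD': 'secret',
#                'SQL_HOST': 'localhost', 'SQL_PORT': 5432})
#   rows = db.query('SELECT * FROM users WHERE id = %s', [1]).fetch()
#   db.close()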
``` |
{
"source": "jorgeviz/yelp_recommender",
"score": 3
} |
#### File: jorgeviz/yelp_recommender/predict.py
```python
import sys
import time
from pyspark import SparkConf, SparkContext
from models import models
from config.config import *
from utils.misc import parse_predit_args, log, read_json
def create_spark():
""" Method to create Spark Context
Returns:
-----
sc : pyspark.SparkContext
"""
conf = SparkConf()\
.setAppName(APP_NAME)\
.setMaster("local[4]")\
.set("spark.executor.memory", "4g")\
.set("spark.executor.cores", "4")\
.set("spark.driver.cores", "2")\
.set("spark.driver.memory", "2g")
sc = SparkContext(conf=conf)
return sc
if __name__ == '__main__':
log(f"Starting {APP_NAME} predicting ...")
st_time = time.time()
args = parse_predit_args()
# load config
cfg = load_conf()
log(f"Using {cfg['class']}")
# create spark
sc = create_spark()
# Load testing data
testing = read_json(sc, args['test_file'])
# Init model
model = models[cfg['class']](sc, cfg)
# Load model and predict
model.load_model()
model.predict(testing, args['output_file'])
# model.predict_debug(testing, args['output_file'])
log(f"Finished predicting in {time.time() - st_time}")
```
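`parse_predit_args` is imported from `utils.misc` but not shown in this dump; a plausible sketch of its interface, inferred from the `args['test_file']` / `args['output_file']` lookups above (everything beyond those two names is an assumption):
```python
# Assumed sketch of utils.misc.parse_predit_args, inferred from its call sites.
import argparse

def parse_predit_args():
    parser = argparse.ArgumentParser(description='Yelp recommender prediction')
    parser.add_argument('test_file', help='path to the JSON-rowed test set')
    parser.add_argument('output_file', help='where to write the predictions')
    return vars(parser.parse_args())  # supports args['test_file'] / args['output_file']
```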
#### File: yelp_recommender/scripts/predict_hybrid.py
```python
import sys
import json
import time
import os
import itertools
from pathlib import Path
import numpy as np
import pandas as pd
from scipy import sparse
from sklearn.neural_network import MLPRegressor
from sklearn.model_selection import train_test_split
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.recommendation import ALS, ALSModel
from pyspark.sql import Row
from pyspark import SparkContext, SparkConf, SQLContext
st_time= time.time()
MAX_PART_SIZE = 10 * (1024**2)
os.environ['PYSPARK_PYTHON'] = 'python3'
os.environ['PYSPARK_DRIVER_PYTHON'] = 'python3'
train_file = '../../data/project/train_review.json' # '/home/ccc_v1_s_YppY_173479/asn131942_7/asn131945_1/asnlib.0/publicdata/train_review.json'
test_file = sys.argv[1]
out_file = sys.argv[2]
def read_file(sc, fpath):
""" Read a file
"""
_fsize = Path(fpath).stat().st_size
return sc.textFile(fpath, _fsize // MAX_PART_SIZE )
def read_json(sc, fpath):
""" Read JSON-rowed file parsed in to RDD
"""
data = read_file(sc, fpath)\
.map(lambda x: json.loads(x))
return data
def create_spark():
""" Method to create Spark Context
Returns:
-----
sc : pyspark.SparkContext
"""
conf = SparkConf()\
.setAppName("ALS")\
.setMaster("local[*]")\
.set("spark.executor.memory","4g")\
.set("spark.driver.cores", "2")\
.set("spark.driver.memory", "4g")
sc = SparkContext(conf=conf)
return sc
sc = create_spark()
spark = SQLContext(sc)
print("-"*50, '\n', "ALS CF Hybrid Recommender System [Prediction]\n", "-"*50)
# Data
lines = read_json(sc, train_file)
parts = lines.map(lambda r: (r['user_id'], r['business_id'],r['stars']))
user_map = parts.map(lambda x: x[0]).distinct().zipWithIndex().collectAsMap()
print("Found Users: ", len(user_map))
biz_map = parts.map(lambda x: x[1]).distinct().zipWithIndex().collectAsMap()
print("Found Businesses: ", len(biz_map))
# -- TEST
# Evaluate the model by computing the RMSE on the test data
test = read_json(sc, test_file)\
.map(lambda r: (r['user_id'], r['business_id']))
# Update Mappings
miss_biz = set(test.map(lambda x: x[1]).distinct().collect()) - set(biz_map)
for m in miss_biz:
    biz_map[m] = len(biz_map)
miss_user = set(test.map(lambda x: x[0]).distinct().collect()) - set(user_map)
for m in miss_user:
    user_map[m] = len(user_map)
testRDD = test.map(lambda p: Row(
userId=int(user_map[p[0]]),
bizId=int(biz_map[p[1]])
)
)
testDF = spark.createDataFrame(testRDD).cache()
print("Test")
testDF.show(5)
# decoding indexes
inv_idxs = {
"user": {v:k for k,v in user_map.items()},
"biz": {v:k for k,v in biz_map.items()}
}
#############################################
# ALS
#############################################
MODEL_NAME = 'als_double_reg0.2_rank50.model'
als_model = ALSModel.load(MODEL_NAME)
predictions = als_model.transform(testDF)
predictions = predictions.fillna({'prediction': 2.5}).cache() # Cold Start
print('Preds')
predictions.show(3)
#############################################
# MLP
#############################################
avgs_files ={
'UAVG': '../../data/project/user_avg.json', #/home/ccc_v1_s_YppY_173479/asn131942_7/asn131945_1/asnlib.0/publicdata/user_avg.json
'BAVG': '../../data/project/business_avg.json' # '/home/ccc_v1_s_YppY_173479/asn131942_7/asn131945_1/asnlib.0/publicdata/business_avg.json'
}
def load_model():
model = np.load('hybridMLP.model.npy',
allow_pickle=True)
return model.item()
def read_avgs(data, avgs):
# averages
for _a, _af in avgs.items():
with open(_af, 'r') as _f:
acache = json.load(_f)
_dmean = np.mean([ij for ij in acache.values()])
_col = 'user_id' if _a.startswith('U') else 'business_id'
data[_a] = data[_col].apply(lambda v: acache.get(v, _dmean))
return data
mlp_model = load_model()
feats = predictions.toPandas()
feats['user_id'] = feats['userId'].apply(lambda x: inv_idxs['user'][x])
feats['business_id'] = feats['bizId'].apply(lambda x: inv_idxs['biz'][x])
feats.rename(columns={'prediction':'ALS'}, inplace=True)
feats = read_avgs(feats, avgs_files)
print("Features:\n", feats[['ALS', 'UAVG', 'BAVG']].head(5))
feats['stars'] = mlp_model.predict(feats[['ALS', 'UAVG', 'BAVG']])
# Save
with open(out_file, 'w') as f:
for j in feats[['user_id','business_id', 'stars']].to_dict(orient='records'):
f.write(json.dumps(j)+'\n')
print("Done predictions!")
sc.stop()
print("Took: ", time.time() - st_time)
```
#### File: yelp_recommender/scripts/train_als.py
```python
import json
import itertools
from pathlib import Path
import os
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.recommendation import ALS
from pyspark.sql import Row
from pyspark import SparkContext, SparkConf, SQLContext
MAX_PART_SIZE = 10 * (1024**2)
os.environ['PYSPARK_PYTHON'] = 'python3'
os.environ['PYSPARK_DRIVER_PYTHON'] = 'python3'
def read_file(sc, fpath):
""" Read a file
"""
_fsize = Path(fpath).stat().st_size
return sc.textFile(fpath, _fsize // MAX_PART_SIZE )
def read_json(sc, fpath):
""" Read JSON-rowed file parsed in to RDD
"""
data = read_file(sc, fpath)\
.map(lambda x: json.loads(x))
return data
def create_spark():
""" Method to create Spark Context
Returns:
-----
sc : pyspark.SparkContext
"""
conf = SparkConf()\
.setAppName("ALS")\
.setMaster("local[*]")\
.set("spark.executor.memory","4g")\
.set("spark.driver.cores", "2")\
.set("spark.driver.memory", "2g")
sc = SparkContext(conf=conf)
return sc
sc = create_spark()
spark = SQLContext(sc)
print("-"*50, '\n', "ALS CF Recommender System\n", "-"*50)
# Data
lines = read_json(sc, '/home/ccc_v1_s_YppY_173479/asn131942_7/asn131945_1/asnlib.0/publicdata/train_review.json')
parts = lines.map(lambda r: (r['user_id'], r['business_id'],r['stars']))
user_map = parts.map(lambda x: x[0]).distinct().zipWithIndex().collectAsMap()
print("Found Users: ", len(user_map))
biz_map = parts.map(lambda x: x[1]).distinct().zipWithIndex().collectAsMap()
print("Found Businesses: ", len(biz_map))
ratingsRDD = parts.map(lambda p: Row(
userId=int(user_map[p[0]]),
bizId=int(biz_map[p[1]]),
rating=float(p[2])
)
)
ratings = spark.createDataFrame(ratingsRDD).cache()
# ####### TEST
# Evaluate the model by computing the RMSE on the test data
test = read_json(sc, '/home/ccc_v1_w_M2ZhZ_97044/asn131942_7/asn131945_1/work/test_review_ratings.json')\
.map(lambda r: (r['user_id'], r['business_id'],r['stars']))
# Update Mappings
miss_biz = set(test.map(lambda x: x[1]).distinct().collect()) - set(biz_map)
for m in miss_biz:
    biz_map[m] = len(biz_map)
miss_user = set(test.map(lambda x: x[0]).distinct().collect()) - set(user_map)
for m in miss_user:
    user_map[m] = len(user_map)
testRDD = test.map(lambda p: Row(
userId=int(user_map[p[0]]),
bizId=int(biz_map[p[1]]),
rating=float(p[2])
)
)
testDF = spark.createDataFrame(testRDD).cache()
print("Test")
testDF.show(5)
# hyper parameters
ranks_ = [40, 50]
regs_ = [0.2, 0.4]
DF = ratings.union(testDF)
(training, val) = DF.randomSplit([0.9, 0.1])
training.show(5)
for (rg, r) in itertools.product(regs_, ranks_):
# Build the recommendation model using ALS on the training data
als = ALS(maxIter=18, rank=r, regParam=rg, userCol="userId", itemCol="bizId", ratingCol="rating", coldStartStrategy='nan')
model = als.fit(training)
predictions = model.transform(val)
evaluator = RegressionEvaluator(metricName="rmse", labelCol="rating", predictionCol="prediction")
val_rmse = evaluator.evaluate(predictions)
print('[VAL]','-'*50,'\nALS Rank:', r, 'Reg:', rg)
print("[VAL] Root-mean-square error = " + str(val_rmse))
model.save(f'als_18_double_reg{rg}_rank{r}.model')
predictions = model.transform(testDF)
# Coldstart
predictions = predictions.fillna({'prediction': 2.5})
rmse = evaluator.evaluate(predictions)
print("[TEST] Root-mean-square error = ", rmse)
sc.stop()
```
#### File: yelp_recommender/scripts/train_user_cf.py
```python
import sys
import math
import re
import time
from pprint import pprint
import json
from pathlib import Path
from collections import OrderedDict, Counter
from operator import add
from pyspark import SparkContext, SparkConf
from minhash import minhash
from lsh import lsh, filter_valid
from utils import pearson_correlation
from collections import namedtuple
# Debugger (only needed when DEBUG is enabled)
DEBUG = False
if DEBUG:
    import ipdb
#
# Params
N_SIGNATURES = 512
LSH_BANDS = 256
LSH_ROWS = N_SIGNATURES // LSH_BANDS
MIN_JACC_SIM = 0.01
MAX_PART_SIZE = 10 * (1024**2)
# Minimum number of Co-rated businesses to consider similar
MIN_CO_RATED = 2
def fetch_arg(_pos):
""" Fetch arg from sys args
"""
if len(sys.argv) <= _pos:
raise Exception("Missing arguments!")
return sys.argv[_pos]
def parse_args():
""" Parse arguments
"""
args = OrderedDict({
"train_file": str, "model_file": str,
"cf_type": str
})
for i, a in enumerate(args):
args[a] = args[a](fetch_arg(i + 1))
return args
def create_spark():
""" Method to create Spark Context
Returns:
-----
sc : pyspark.SparkContext
"""
conf = SparkConf()\
.setAppName("Task3")\
.setMaster("local[3]")\
.set("spark.executor.memory","4g")\
.set("spark.driver.cores", "3")\
.set("spark.driver.memory", "3g")
sc = SparkContext(conf=conf)
return sc
def read_file(sc, fpath):
""" Read a file
"""
_fsize = Path(fpath).stat().st_size
return sc.textFile(fpath, _fsize // MAX_PART_SIZE )
def read_json(sc, fpath):
""" Read JSON-rowed file parsed in to RDD
"""
data = read_file(sc, fpath)\
.map(lambda x: json.loads(x))
return data
def read_csv(sc, fpath, with_heads=False):
""" Read and parse CSV into RDD
"""
def filter_heads(z): return z[1] > 0
data = read_file(sc, fpath)\
.zipWithIndex()\
.filter(lambda z: True if with_heads else filter_heads(z))\
.map(lambda z: tuple(z[0].split(',')))
return data
def log(*msg, level="INFO"):
""" Log message with visual help
"""
print("-"*50)
print("[{}]".format(level), end=" ")
for m in msg:
print("{}".format(m), end=" ")
print("\n" + "-"*50)
def compute_w_log(msg, fn):
""" Compute Spark action with
logged message and timestamp
"""
if not DEBUG:
return
t1 = time.time()
z = fn()
log(msg, z, 'in', time.time() - t1)
ipdb.set_trace()
# ------------ Item Based CF ------------------------
def get_biz_ratings(data):
""" Get business ratings
"""
log("Incoming business", data.map(lambda x: x['business_id']).distinct().count())
biz_ratings = data\
.map(lambda x: (x['business_id'], (x['user_id'], x['stars'])))\
.groupByKey()
biz_ratings.cache()
#compute_w_log("Biz ratings:", biz_ratings.count)
return biz_ratings, biz_ratings.sortByKey().keys()
def get_joined_biz_candidates(biz_data, biz_candids):
""" Compute Joined RDD with candidates
key and rating values
biz_data: (business_id, (user1, user2))
biz_candids: (business_id1, business_id2)
(business_id1, business_id2, (val1, ...), (val2,...))
"""
# join columns using cache of unique users
BCache = namedtuple('BCache', ('set', 'dict'))
biz_cache = biz_data\
.mapValues(lambda x: BCache(set([j[0] for j in x]), dict(x)) )\
.collectAsMap()
joined_cands = biz_candids.map(lambda x: (
(x[0], x[1]), (biz_cache[x[0]].set, biz_cache[x[1]].set)
)
)
# filter the ones with less than Min Co-rated
joined_cands = joined_cands\
.mapValues(lambda v: v[0].intersection(v[1]))\
        .filter(lambda s: len(s[1]) >= MIN_CO_RATED)
compute_w_log("Joined Filtered:", joined_cands.count)
# compute intersection
def get_ratings_inters(x):
(b1, b2), inters = x
return {_k: (biz_cache[b1].dict[_k], biz_cache[b2].dict[_k])\
for _k in inters}
filtered_cands = joined_cands\
.map(lambda x: (x[0], get_ratings_inters(x)) )
compute_w_log("Intersection Candidates", filtered_cands.count)
return filtered_cands
def get_item_based_cf(data):
""" Get similar business pairs
and respective Pearson weights
"""
# Fetch business ratings rdd
biz_rdd, biz_keys = get_biz_ratings(data)
# Generate candidates
biz_candids = biz_keys.cartesian(biz_keys)\
.filter(lambda x: x[0] < x[1])
#compute_w_log("Candidates", biz_candids.count)
# Generate joined canddates rdd
biz_filtered = get_joined_biz_candidates(biz_rdd, biz_candids)
# filter non-min co-rated pairs
# biz_filtered = biz_joined\
# .filter(lambda x: x[1][2].__len__() >= MIN_CO_RATED)
biz_filtered.cache()
compute_w_log("Filtered possible Cands", biz_filtered.count)
# Compute Pearson Correlation
biz_corr = biz_filtered\
.map(lambda x: (x[0], pearson_correlation(x[1])))
compute_w_log("Candidates Corr", biz_corr.count)
_biz_correls = biz_corr.collect()
log("Candidates Pairs:", len(_biz_correls))
return _biz_correls
# ------------ End of Item Based CF -----------------
# ------------ User Based CF ------------------------
def get_rating_shingles(data):
""" Map from user id to the Index of the
business at which gave a review
"""
biz_map = dict(data.flatMapValues(lambda x: x)
.map(lambda x: x[1][0])
.distinct()
.zipWithIndex()
.collect())
# group by user_id and reduce unique user indexes
user_ratings = data\
.flatMapValues(lambda x: x)\
.map(lambda x: (x[0], x[1][0]))\
.groupByKey()\
.mapValues(lambda x: set(biz_map[_k] for _k in set(x)))
return user_ratings, biz_map
def minhash_lsh_candidates(data):
""" Compute boolean minhash lsh to
yield candidates over ratings matrix
"""
# get shingles
user_ratings, biz_map = get_rating_shingles(data)
compute_w_log("Rating Shingles", user_ratings.count)
# minhash signatures
user_signs = minhash(user_ratings, len(biz_map), N_SIGNATURES)
compute_w_log("Minhash Signatures", user_signs.count)
# compute LSH buckets
user_candidates = lsh(user_signs, LSH_BANDS, LSH_ROWS)
compute_w_log("LSH Candidates", user_candidates.count)
# Join with ratings and compute jaccard sim
cache_vects = dict(user_candidates\
.map(lambda x: x[1])\
.flatMap(lambda x: [(x_i, 1) for x_i in x ])\
.join(user_ratings)\
.map(lambda x: (x[0], x[1][1]))\
.collect()
)
valid_user_cands = filter_valid(user_candidates,
cache_vects,
MIN_JACC_SIM,
serialize=False
)
valid_user_cands.cache()
compute_w_log("Valid Candidates", valid_user_cands.count)
# del cache_vects
return valid_user_cands
def get_user_ratings(data):
""" Get user's ratings
"""
user_ratings = data\
.map(lambda x: (x['user_id'], (x['business_id'], x['stars'])))\
.groupByKey()\
.filter(lambda x: len(x[1]) >= MIN_CO_RATED)
user_ratings.cache()
compute_w_log("User ratings:", user_ratings.count)
return user_ratings
def get_joined_user_candidates(user_data, user_cands):
""" Join candidates with features
"""
user_cache = dict(user_data.collect())
joined_cands = user_cands.map(lambda x: (
(x[0], x[1]), (user_cache[x[0]], user_cache[x[1]])
)
)
# compute intersection
def get_ratings_inters(x):
        v1, v2 = x
        v1, v2 = dict(v1), dict(v2)
        inters = {}
for _k, _v1 in v1.items():
if _k in v2:
inters[_k] = (_v1, v2[_k])
return v1, v2, inters
joined_cands = joined_cands\
.map(lambda x: (x[0], get_ratings_inters(x[1])))
compute_w_log("Joined Cache Users:", joined_cands.count)
return joined_cands
def get_user_based_cf(data):
""" Get similar users by generating possible
candidates over MinHash-LSH and computing
respective pearson correlation
"""
# Build users rdd -- (user, (biz, stars))
users_rdd = get_user_ratings(data)
# Fetch Candidates from MinHash-LSH
user_candids = minhash_lsh_candidates(users_rdd)
# Join candidates with features
user_joined_cand = get_joined_user_candidates(users_rdd, user_candids)
# filter those without enough co-rated
user_filt_cand = user_joined_cand\
        .filter(lambda x: len(x[1][2]) >= MIN_CO_RATED)
compute_w_log("Filtered Cands", user_filt_cand.count)
# Compute Pearson Correlation
user_corrs = user_filt_cand\
.map(lambda x: (x[0], pearson_correlation(x[1][2])))
compute_w_log("Candidates Correl", user_corrs.count)
return user_corrs.collect()
# ------------ End of User Based CF -----------------
if __name__ == "__main__":
log("Starting Task 3 [Train]- Collaborative Filtering recommentation system")
st_time = time.time()
args = parse_args()
log("Arguments: ", args)
sc = create_spark()
# Read data
train_data = read_json(sc, args['train_file'])
cf_type = args['cf_type']
log("Loaded Training data")
# Redirect to CF-type method
if cf_type == "item_based":
model_wgts = get_item_based_cf(train_data)
else:
model_wgts = get_user_based_cf(train_data)
# Save model
with open(args['model_file'], 'w') as mdl:
k = ["b1", "b2"] \
if cf_type == "item_based"\
else ["u1", "u2"]
for v in model_wgts:
mdl.write(json.dumps({
k[0]: v[0][0],
k[1]: v[0][1],
"stars": v[1]
})+"\n")
log("Finished Task 3 [TRAIN], Saved Model!")
log("Job duration: ", time.time()-st_time)
```
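`pearson_correlation` is imported from the repo's `utils` module and not shown here; from both call sites it receives a dict mapping each co-rated key to a `(rating_a, rating_b)` pair. A minimal sketch under that assumption:
```python
# Assumed sketch of utils.pearson_correlation, inferred from its call sites.
import math

def pearson_correlation(co_ratings):
    """co_ratings: dict mapping a co-rated item/user -> (rating_a, rating_b)."""
    a = [r[0] for r in co_ratings.values()]
    b = [r[1] for r in co_ratings.values()]
    mean_a, mean_b = sum(a) / len(a), sum(b) / len(b)
    num = sum((x - mean_a) * (y - mean_b) for x, y in zip(a, b))
    den = math.sqrt(sum((x - mean_a) ** 2 for x in a)) \
        * math.sqrt(sum((y - mean_b) ** 2 for y in b))
    return num / den if den else 0.0
```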
#### File: jorgeviz/yelp_recommender/train.py
```python
import sys
import time
from pyspark import SparkConf, SparkContext
from config.config import *
from models import models
from utils.misc import log, read_json
def create_spark():
""" Method to create Spark Context
Returns:
-----
sc : pyspark.SparkContext
"""
conf = SparkConf()\
.setAppName(APP_NAME)\
.setMaster("local[4]")\
.set("spark.executor.memory", "4g")\
.set("spark.executor.cores", "4")\
.set("spark.driver.cores", "2")\
.set("spark.driver.memory", "2g")
sc = SparkContext(conf=conf)
return sc
if __name__ == '__main__':
log(f"Starting {APP_NAME} training ...")
st_time = time.time()
# load config
cfg = load_conf()
log(f"Using {cfg['class']}")
# create spark
sc = create_spark()
# Load training data
training = read_json(sc, cfg['training_data'])
# Init model
model = models[cfg['class']](sc, cfg)
# Start training
model.train(training)
log(f"Finished training in {time.time()- st_time }")
``` |
{
"source": "jorgeyuri11/BLASTOFF",
"score": 4
} |
#### File: jorgeyuri11/BLASTOFF/Q14.py
```python
def isPalindrome(x):
if x == x[::-1]:
print("A palavra é palíndromo!")
else:
print("A palavra não é palíndromo!")
isPalindrome("arara")
```
#### File: jorgeyuri11/BLASTOFF/Q2.py
```python
def avgConsume (A, B):
x = A/B
print ("O consumo médio foi de: ", x ,"km/l")
avgConsume(100,20)
```
#### File: jorgeyuri11/BLASTOFF/Q3.py
```python
def minNumber (a, b, c):
x = min(a, b, c)
print ("O valor minímo é: ", x)
minNumber(10.23,20.43,15.87)
```
#### File: jorgeyuri11/BLASTOFF/Q9.py
```python
def numberTable (x):
for i in range (1,11):
print ("{}x{} = ".format(x, i),x*i)
numberTable(4)
``` |
{
"source": "Jorgezepmed/Platzi_python",
"score": 4
} |
#### File: Jorgezepmed/Platzi_python/list_and_dics.py
```python
def run():
my_list = [1, "Hello", True, 4.5]
my_dict = {"firstname": "Facundo", "lastname": "GarcÃa"}
super_list = [
{"firstname": "Facundo", "lastname": "GarcÃa"},
{"firstname": "Miguel", "lastname": "Rodriguez"},
{"firstname": "Pablo", "lastname": "Trinidad"},
{"firstname": "Susana", "lastname": "Martinez"},
{"firstname": "José", "lastname": "Fernandez"},
]
super_dict = {
"natural_nums": [1, 2, 3, 4, 5],
"integer_nums": [-1, -2, 3, 0, 1],
"floating_nums": [1.1, 4.55, 6.43],
}
for key, value in super_dict.items():
print(key, ">", value)
if __name__ == '__main__':
run()
``` |
{
"source": "jorgii/lawn-mowers",
"score": 4
} |
#### File: lawn-mowers/models/grid.py
```python
from core.exceptions import OutOfBoundsError, InvalidGridCoordinates
class Grid(object):
START_X = 0
START_Y = 0
def __init__(self, x, y):
self.x = x
self.y = y
@property
def x(self):
return self.__x
@x.setter
def x(self, x):
x = int(x)
if x < self.START_X:
            raise InvalidGridCoordinates
self.__x = x
@property
def y(self):
return self.__y
@y.setter
def y(self, y):
y = int(y)
if y < self.START_Y:
            raise InvalidGridCoordinates
self.__y = y
@property
def boundaries(self):
return '[{}:{}] - [{}:{}]'.format(
self.START_X,
self.START_Y,
self.x,
self.y
)
class Position(object):
def __init__(self, x, y, grid):
self.grid = grid
self.x = x
self.y = y
@property
def x(self):
return self.__x
@x.setter
def x(self, x):
x = int(x)
if x > self.grid.x or x < self.grid.START_X:
raise OutOfBoundsError(
'X ({}) is outside of grid boundaries '
'({})'.format(x, self.grid.boundaries))
self.__x = x
@property
def y(self):
return self.__y
@y.setter
def y(self, y):
y = int(y)
if y > self.grid.y or y < self.grid.START_Y:
raise OutOfBoundsError(
'Y ({}) is outside of grid boundaries '
'({})'.format(y, self.grid.boundaries))
self.__y = y
```
#### File: lawn-mowers/models/lawn_mower.py
```python
from core.exceptions import UnexpectedOrientation, OutOfBoundsError
class LawnMower(object):
ORIENTATIONS = ('N', 'E', 'W', 'S')
ROTATIONS = ('L', 'R')
ORIENTATION_ROTATIONS = {
'N': {'L': 'W', 'R': 'E'},
'E': {'L': 'N', 'R': 'S'},
'W': {'L': 'S', 'R': 'N'},
'S': {'L': 'E', 'R': 'W'},
}
    MOVEMENTS = ('F',)
ORIENTATION_MOVEMENTS = {
'N': {'F': {'attr': 'y', 'magnitude': 1}},
'E': {'F': {'attr': 'x', 'magnitude': 1}},
'W': {'F': {'attr': 'x', 'magnitude': -1}},
'S': {'F': {'attr': 'y', 'magnitude': -1}},
}
def __init__(self, position, orientation):
self.position = position
self.orientation = orientation
def __repr__(self):
return 'Lawn Mower at {}:{}:{}'.format(
self.position.x, self.position.y, self.orientation)
@property
def orientation(self):
return self.__orientation
@orientation.setter
def orientation(self, orientation):
if orientation not in self.ORIENTATIONS:
raise UnexpectedOrientation(
'Unexpected orientation. '
'Expected values: {}'.format(self.ORIENTATIONS))
self.__orientation = orientation
def rotate(self, rotation):
self.orientation = self.ORIENTATION_ROTATIONS[
self.orientation][rotation]
def move(self, movement):
attr = self.ORIENTATION_MOVEMENTS[self.orientation][movement]['attr']
magnitude = self.ORIENTATION_MOVEMENTS[
self.orientation][movement]['magnitude']
current_attr_value = getattr(self.position, attr)
try:
setattr(self.position, attr, current_attr_value + magnitude)
except OutOfBoundsError:
pass
def perform_actions(self, actions):
for action in actions:
if action in self.ROTATIONS:
self.rotate(action)
elif action in self.MOVEMENTS:
self.move(action)
```
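A short usage sketch tying `Grid`, `Position` and `LawnMower` together, following the imports used by the tests below (the action sequence is the classic mower example, shown here for illustration):
```python
# Illustrative run of a mower on a 5x5 grid.
from models import Grid, Position, LawnMower

grid = Grid(5, 5)
mower = LawnMower(Position(1, 2, grid), 'N')
mower.perform_actions('LFLFLFLFF')  # strings iterate per character
print(mower)  # Lawn Mower at 1:3:N
```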
#### File: lawn-mowers/models/test_grid.py
```python
import pytest
from models import Grid, Position
from core.exceptions import OutOfBoundsError, InvalidGridCoordinates
class TestGrid(object):
def test_grid_x_str_value_error(self):
with pytest.raises(ValueError):
Grid('foo', 2)
def test_grid_y_str_value_error(self):
with pytest.raises(ValueError):
Grid(2, 'foo')
def test_invalid_grid_coordinates_x(self):
with pytest.raises(InvalidGridCoordinates):
Grid(-1, 0)
def test_invalid_grid_coordinates_y(self):
with pytest.raises(InvalidGridCoordinates):
Grid(0, -1)
class TestPosition(object):
@pytest.fixture
def grid(self):
return Grid(5, 5)
def test_positions_x_out_of_bounds(self, grid):
with pytest.raises(OutOfBoundsError):
Position(6, 5, grid)
def test_positions_x_out_of_bounds_negative(self, grid):
with pytest.raises(OutOfBoundsError):
Position(-1, 5, grid)
def test_positions_y_out_of_bounds(self, grid):
with pytest.raises(OutOfBoundsError):
Position(5, 6, grid)
def test_positions_y_out_of_bounds_negative(self, grid):
with pytest.raises(OutOfBoundsError):
Position(5, -1, grid)
def test_position_x_str_value_error(self, grid):
with pytest.raises(ValueError):
Position('foo', 2, grid)
def test_position_y_str_value_error(self, grid):
with pytest.raises(ValueError):
Position(2, 'foo', grid)
def test_position_x_y_bounds_lower_left(self, grid):
position = Position(0, 0, grid)
assert position.x == 0
assert position.y == 0
def test_position_x_y_bounds_lower_right(self, grid):
position = Position(5, 0, grid)
assert position.x == 5
assert position.y == 0
def test_position_x_y_bounds_upper_left(self, grid):
position = Position(0, 5, grid)
assert position.x == 0
assert position.y == 5
def test_position_x_y_bounds_upper_right(self, grid):
position = Position(5, 5, grid)
assert position.x == 5
assert position.y == 5
```
#### File: lawn-mowers/models/test_lawn_mower.py
```python
import pytest
from models import LawnMower, Grid, Position
from core.exceptions import UnexpectedOrientation
class TestLawnMower(object):
@pytest.fixture
def position_1_1(self):
grid = Grid(5, 5)
return Position(1, 1, grid)
@pytest.fixture
def position_0_0(self):
grid = Grid(5, 5)
return Position(0, 0, grid)
@pytest.fixture
def position_0_5(self):
grid = Grid(5, 5)
return Position(0, 5, grid)
@pytest.fixture
def position_5_5(self):
grid = Grid(5, 5)
return Position(5, 5, grid)
@pytest.fixture
def position_5_0(self):
grid = Grid(5, 5)
return Position(5, 0, grid)
def test_unexpected_orientation(self, position_1_1):
with pytest.raises(UnexpectedOrientation):
LawnMower(position_1_1, 'foo')
def test_n_orientation(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'N')
assert lawn_mower.orientation == 'N'
def test_e_orientation(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'E')
assert lawn_mower.orientation == 'E'
def test_w_orientation(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'W')
assert lawn_mower.orientation == 'W'
def test_s_orientation(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'S')
assert lawn_mower.orientation == 'S'
def test_rotate_n_l(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'N')
lawn_mower.rotate('L')
assert lawn_mower.orientation == 'W'
def test_rotate_n_r(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'N')
lawn_mower.rotate('R')
assert lawn_mower.orientation == 'E'
def test_rotate_e_l(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'E')
lawn_mower.rotate('L')
assert lawn_mower.orientation == 'N'
def test_rotate_e_r(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'E')
lawn_mower.rotate('R')
assert lawn_mower.orientation == 'S'
def test_rotate_w_l(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'W')
lawn_mower.rotate('L')
assert lawn_mower.orientation == 'S'
def test_rotate_w_r(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'W')
lawn_mower.rotate('R')
assert lawn_mower.orientation == 'N'
def test_rotate_s_l(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'S')
lawn_mower.rotate('L')
assert lawn_mower.orientation == 'E'
def test_rotate_s_r(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'S')
lawn_mower.rotate('R')
assert lawn_mower.orientation == 'W'
def test_move_1_1_n(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'N')
lawn_mower.move('F')
assert lawn_mower.position.x == 1
assert lawn_mower.position.y == 2
def test_move_1_1_e(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'E')
lawn_mower.move('F')
assert lawn_mower.position.x == 2
assert lawn_mower.position.y == 1
def test_move_1_1_w(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'W')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 1
def test_move_1_1_s(self, position_1_1):
lawn_mower = LawnMower(position_1_1, 'S')
lawn_mower.move('F')
assert lawn_mower.position.x == 1
assert lawn_mower.position.y == 0
def test_move_0_0_n(self, position_0_0):
lawn_mower = LawnMower(position_0_0, 'N')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 1
def test_move_0_0_e(self, position_0_0):
lawn_mower = LawnMower(position_0_0, 'E')
lawn_mower.move('F')
assert lawn_mower.position.x == 1
assert lawn_mower.position.y == 0
def test_move_0_0_w(self, position_0_0):
lawn_mower = LawnMower(position_0_0, 'W')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 0
def test_move_0_0_s(self, position_0_0):
lawn_mower = LawnMower(position_0_0, 'S')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 0
def test_move_0_5_n(self, position_0_5):
lawn_mower = LawnMower(position_0_5, 'N')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 5
def test_move_0_5_e(self, position_0_5):
lawn_mower = LawnMower(position_0_5, 'E')
lawn_mower.move('F')
assert lawn_mower.position.x == 1
assert lawn_mower.position.y == 5
def test_move_0_5_w(self, position_0_5):
lawn_mower = LawnMower(position_0_5, 'W')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 5
def test_move_0_5_s(self, position_0_5):
lawn_mower = LawnMower(position_0_5, 'S')
lawn_mower.move('F')
assert lawn_mower.position.x == 0
assert lawn_mower.position.y == 4
def test_move_5_5_n(self, position_5_5):
lawn_mower = LawnMower(position_5_5, 'N')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 5
def test_move_5_5_e(self, position_5_5):
lawn_mower = LawnMower(position_5_5, 'E')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 5
def test_move_5_5_w(self, position_5_5):
lawn_mower = LawnMower(position_5_5, 'W')
lawn_mower.move('F')
assert lawn_mower.position.x == 4
assert lawn_mower.position.y == 5
def test_move_5_5_s(self, position_5_5):
lawn_mower = LawnMower(position_5_5, 'S')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 4
def test_move_5_0_n(self, position_5_0):
lawn_mower = LawnMower(position_5_0, 'N')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 1
def test_move_5_0_e(self, position_5_0):
lawn_mower = LawnMower(position_5_0, 'E')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 0
def test_move_5_0_w(self, position_5_0):
lawn_mower = LawnMower(position_5_0, 'W')
lawn_mower.move('F')
assert lawn_mower.position.x == 4
assert lawn_mower.position.y == 0
def test_move_5_0_s(self, position_5_0):
lawn_mower = LawnMower(position_5_0, 'S')
lawn_mower.move('F')
assert lawn_mower.position.x == 5
assert lawn_mower.position.y == 0
```
#### File: lawn-mowers/utils/parser.py
```python
def parse_file(file_path):
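    # Expected input format (inferred from the parsing below):
    #   line 1: grid upper-right corner, e.g. "5 5"
    #   then two lines per mower:
    #     "X Y O"   - start position and orientation, e.g. "1 2 N"
    #     "LFLFLFF" - the sequence of actions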
result = {}
with open(file_path, 'r') as f:
lines = f.read().splitlines()
result['grid'] = [int(i) for i in lines[0].split()]
result['lawn_mowers'] = []
lm_data_start = 1
lm_data_end = 3
has_lawn_mowers = True
while has_lawn_mowers:
lawn_mower = {}
try:
lawn_mower_lines = lines[lm_data_start:lm_data_end]
lawn_mower['position'] = [
int(i) for i in lawn_mower_lines[0].split()[0:2]]
lawn_mower['orientation'] = lawn_mower_lines[0].split()[2]
lawn_mower['actions'] = list(lawn_mower_lines[1])
result['lawn_mowers'].append(lawn_mower)
lm_data_start += 2
lm_data_end += 2
except IndexError:
has_lawn_mowers = False
return result
``` |
{
"source": "jorgii/universal-api-client",
"score": 2
} |
#### File: universal-api-client/tests/test_client.py
```python
import pytest
from requests.auth import HTTPBasicAuth
from universal_api_client.client import Client
from universal_api_client.request import APIRequest
@pytest.fixture
def base_url():
return 'https://swapi.co/api/'
@pytest.fixture
def client(base_url):
"""API client fixture"""
return Client(base_url=base_url)
def test_base_url():
client = Client(base_url='https://google.com/')
assert client.base_url == 'https://google.com/'
def test_auth():
auth = HTTPBasicAuth('user', 'pass')
client = Client(
base_url='https://google.com/',
auth=auth)
assert client.auth == auth
def test_generate_request(client):
assert isinstance(client.request, APIRequest)
def test_multiple_client_request_calls(client, base_url):
assert client.request.people.url == \
'{}people/'.format(base_url)
assert client.request.people.url == \
'{}people/'.format(base_url)
assert client.request.people.url == \
'{}people/'.format(base_url)
def test_client_no_trailing_slash():
client = Client(
base_url='https://google.com/',
trailing_slash=False)
assert client.request.trailing_slash is False
``` |
{
"source": "jorgimello/meta-learning-fer",
"score": 3
} |
#### File: meta-learning-fer/data/fer_plus_data_generator.py
```python
from constants import *
from sklearn.model_selection import train_test_split
from mtcnn.mtcnn import MTCNN
import numpy as np
import os, csv, cv2
# Instantiate face detector
detector = MTCNN()
# Prepare label
def emotion_to_vec(x):
d = np.zeros(len(FER_2013_EMOTIONS))
d[x] = 1.0
return d
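# e.g. with len(FER_2013_EMOTIONS) == 7, emotion_to_vec(2) -> [0., 0., 1., 0., 0., 0., 0.]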
# Detect and return only the face in the image
def crop_face(image):
result = detector.detect_faces(image)
if result:
bbox = result[0]['box']
for i in range(len(bbox)): # Avoid getting negative coordinates via MTCNN
if bbox[i] < 0:
bbox[i] = 0
image = image[bbox[1]:bbox[1] + bbox[3], bbox[0]:
bbox[0] + bbox[2], :]
return image
else:
return []
# Read, crop, and convert image to grayscale
def format_image(image_path, img):
    if img.endswith('.png'):  # Avoid formatting files that are not images
        input_img = cv2.imread((image_path + img), cv2.IMREAD_COLOR)  # 3-channel (BGR) input for MTCNN face detection
        input_img = crop_face(input_img)
        if len(input_img) > 0:  # If a face was detected
input_img = cv2.resize(input_img, (34, 44), cv2.INTER_LINEAR)
input_img = cv2.cvtColor(input_img, cv2.COLOR_BGR2GRAY)
return input_img
return []
data_labels = []
data_images = []
test_labels = []
test_images = []
# Open csv train and and test files containing image filename and labels
# 1. Test set
print ('[+] Starting test set')
csv_file_path = FER_2013_DATASET_PATH + FER_2013_TEST_FOLDER + 'label.csv'
with open(csv_file_path) as csvfile:
rows = csv.reader(csvfile)
for row in rows:
emotions = list(map(float, row[2:len(row)]))
emotion = np.argwhere(emotions == np.amax(emotions)).flatten().tolist()
if len(emotion) == 1 and emotion[0] < len(FER_2013_EMOTIONS): # If there is a highest emotion and not-unknown or not-a-face
image_path = FER_2013_DATASET_PATH + FER_2013_TEST_FOLDER
img = row[0]
print ('[+] Current file: ', img)
input_img = format_image(image_path, img)
            if len(input_img) > 0:
test_labels.append(emotion_to_vec(emotion[0]))
test_images.append(input_img)
print ('[+] Test set completed')
# 2. Train set
print ('[+] Starting train set')
csv_file_path = FER_2013_DATASET_PATH + FER_2013_TRAIN_FOLDER + 'label.csv'
with open(csv_file_path) as csvfile:
rows = csv.reader(csvfile)
for row in rows:
emotions = list(map(float, row[2:len(row)]))
emotion = np.argwhere(emotions == np.amax(emotions)).flatten().tolist()
if len(emotion) == 1 and emotion[0] < len(FER_2013_EMOTIONS): # If there is a highest emotion and not-unknown or not-a-face
image_path = FER_2013_DATASET_PATH + FER_2013_TRAIN_FOLDER
img = row[0]
print ('[+] Current file: ', img)
input_img = format_image(image_path, img)
            if len(input_img) > 0:
data_labels.append(emotion_to_vec(emotion[0]))
data_images.append(input_img)
print ('[+] Train set completed')
x_train = np.asarray(data_images)
y_train = np.asarray(data_labels)
x_test = np.asarray(test_images)
y_test = np.asarray(test_labels)
np.save(SAVE_DATASET_FER_2013_IMAGES_FILENAME, x_train)
np.save(SAVE_DATASET_FER_2013_LABELS_FILENAME, y_train)
np.save(SAVE_DATASET_FER_2013_IMAGES_TEST_FILENAME, x_test)
np.save(SAVE_DATASET_FER_2013_LABELS_TEST_FILENAME, y_test)
print ('[+] FERPlus dataset and labels saved!')
``` |
{
"source": "jorgstei/Datateknologi",
"score": 3
} |
#### File: A1/assignment1/task2ab.py
```python
import matplotlib.pyplot as plt
import pathlib
import numpy as np
from utils import read_im, save_im
output_dir = pathlib.Path("image_solutions")
output_dir.mkdir(exist_ok=True)
im = read_im(pathlib.Path("images", "lake.jpg"))
plt.imshow(im)
def greyscale(im):
""" Converts an RGB image to greyscale
Args:
im ([type]): [np.array of shape [H, W, 3]]
Returns:
im ([type]): [np.array of shape [H, W]]
"""
    im = np.dot(im[..., :3], [0.2126, 0.7152, 0.0722])  # Rec.709 luma weights
return im
im_greyscale = greyscale(im)
save_im(output_dir.joinpath("lake_greyscale.jpg"), im_greyscale, cmap="gray")
plt.imshow(im_greyscale, cmap="gray")
plt.show()
def inverse(im):
""" Finds the inverse of the greyscale image
Args:
im ([type]): [np.array of shape [H, W]]
Returns:
im ([type]): [np.array of shape [H, W]]
"""
# YOUR CODE HERE
print(im.min, im.max)
for x in range(im.shape[0]):
for y in range(im.shape[1]):
im[x][y] = [1-im[x][y][0], 1-im[x][y][1], 1-im[x][y][2]]
return im
im_inverse = inverse(im)
save_im(output_dir.joinpath("lake_inverse.jpg"), im_inverse, cmap="gray")
plt.imshow(im_inverse, cmap="gray")
plt.show()
```
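The weights used in `greyscale` above are the Rec.709 luma coefficients; a quick sanity check of the dot product on a single pixel:
```python
# Worked example of the Rec.709 luma dot product used in greyscale().
import numpy as np

pixel = np.array([0.5, 0.25, 0.75])  # R, G, B in [0, 1]
luma = np.dot(pixel, [0.2126, 0.7152, 0.0722])
print(luma)  # 0.5*0.2126 + 0.25*0.7152 + 0.75*0.0722 ~= 0.33925
```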
#### File: A2/assignment2/task4d.py
```python
import skimage
import skimage.io
import skimage.transform
import pathlib
import numpy as np
import utils
import matplotlib.pyplot as plt
image_dir = pathlib.Path("images")
impaths = [
image_dir.joinpath("page1.png"),
image_dir.joinpath("page2.png"),
image_dir.joinpath("page4.png"),
image_dir.joinpath("page6.png"),
image_dir.joinpath("page7.png"),
image_dir.joinpath("page8.png"),
]
def create_binary_image(im):
"""Creates a binary image from a greyscale image "im"
Args:
im ([np.ndarray, np.float]): [An image of shape [H, W] in the range [0, 1]]
Returns:
[np.ndarray, np.bool]: [A binary image]
"""
# START YOUR CODE HERE ### (You can change anything inside this block)
    fft_im = np.log(np.abs(np.fft.fftshift(np.fft.fft2(im))) + 1)
    min_val = np.amin(fft_im)
    max_val = np.amax(fft_im)
    print("Min:", min_val, "Max:", max_val)
    # Keep the frequency components whose log-magnitude exceeds the threshold
    binary_im = fft_im > 6
'''
plt.figure(figsize=(20, 4))
plt.subplot(1, 2, 1)
plt.imshow(fft_im, cmap="gray")
plt.subplot(1, 2, 2)
plt.imshow(binary_im, cmap="gray")
plt.show()
'''
### END YOUR CODE HERE ###
return binary_im
if __name__ == "__main__":
# NO NEED TO EDIT THE CODE BELOW.
verbose = True
plt.figure(figsize=(4, 12))
plt.tight_layout()
images_to_visualize = []
for i, impath in enumerate(impaths):
im = utils.read_im(str(impath))
im_binary = create_binary_image(im)
assert im_binary.dtype == np.bool, f"Expected the image to be of dtype np.bool, got {im_binary.dtype}"
angles, distances = utils.find_angle(im_binary)
angle = 0
if len(angles) > 0:
angle = angles[0] * 180 / np.pi
print(f"Found angle: {angle:.2f}")
hough_im = utils.create_hough_line_image(im, angles, distances)
rotated = skimage.transform.rotate(im, angle, cval=im.max())
images_to_visualize.extend([im, im_binary, hough_im, rotated])
image = utils.np_make_image_grid(images_to_visualize, nrow=len(impaths))
utils.save_im("task4d.png", image)
plt.imshow(image, cmap="gray")
plt.show()
```
#### File: image_processing/A3/create_submission_zip.py
```python
import os
import zipfile
def query(question, options):
print(question)
to_write = ["\n\t{}: {}".format(key, val) for key, val in options.items()]
to_write = "".join(to_write)
print("Options to select:" + to_write)
answer = None
    while True:
answer_alternatives = ", ".join([str(key) for key in options.keys()])
answer = input("Select an option [{}]:".format(answer_alternatives))
answer = answer.strip()
if answer not in options.keys():
print("Answer is not in: {}".format(list(options.keys())))
continue
return options[answer]
# If you create other files, edit this list to include them in the .zip file.
files_to_include = {
"task2a": [".py", ".ipynb"],
"task2b": [".py", ".ipynb"],
"task3a": [".py", ".ipynb"],
"task3b": [".py", ".ipynb"],
"task3c": [".py", ".ipynb"],
"task3d": [".py", ".ipynb"],
"utils": [".py"],
}
zipfile_path = "assignment_code.zip"
print("-"*80)
def select_file(filename, extension):
if len(extensions) == 1:
return filename + extensions[0]
options = {str(i): filename + extensions[i] for i in range(len(extensions))}
filename = query("Which file would you like to add?", options)
return filename
files_added = []
with zipfile.ZipFile(zipfile_path, "w") as fp:
for filename, extensions in files_to_include.items():
filepath = select_file(filename, extensions)
assert os.path.isfile(filepath),\
f"Did not find path: {filepath}"
fp.write(filepath)
files_added.append(filepath)
print("-"*80)
print("Files added to zip:")
print("\t" + "\n\t".join(files_added))
print("Zipfile saved to: {}".format(zipfile_path))
print("Please, upload your assignment PDF file outside the zipfile to blackboard.")
``` |
{
"source": "Jorgusia/kolko",
"score": 4
} |
#### File: Jorgusia/kolko/main.py
```python
Plansza_Do_Gry = {'7': ' ', '8': ' ', '9': ' ', '4': ' ', '5': ' ', '6': ' ', '1': ' ', '2': ' ', '3': ' '}
Klawisze_gry = []
for key in Plansza_Do_Gry:
Klawisze_gry.append(key)
def drukuj_plansze(pole):
print(f"{pole['7']}|{pole['8']}|{pole['9']}")
print("-+-+-")
print(f"{pole['4']}|{pole['5']}|{pole['6']}")
print("-+-+-")
print(f"{pole['1']}|{pole['2']}|{pole['3']}")
drukuj_plansze(Plansza_Do_Gry)
def gra():
gracz = 'X'
licznik = 0
for i in range(10):
drukuj_plansze(Plansza_Do_Gry)
        move = input(f"It's player {gracz}'s move. Choose where to place your mark! ")
        if Plansza_Do_Gry[move] == ' ':
            Plansza_Do_Gry[move] = gracz
            licznik += 1
        else:
            print('That spot is already taken.\nChoose another spot!')
            continue
if licznik >= 5:
            winning_lines = [
                ('7', '8', '9'), ('4', '5', '6'), ('1', '2', '3'),
                ('1', '4', '7'), ('2', '5', '8'), ('3', '6', '9'),
                ('1', '5', '9'), ('3', '5', '7'),
            ]
            winner = False
            for a, b, c in winning_lines:
                if Plansza_Do_Gry[a] == Plansza_Do_Gry[b] == Plansza_Do_Gry[c] != ' ':
                    winner = True
                    break
            if winner:
                drukuj_plansze(Plansza_Do_Gry)
                print('\nGame over')
                print(f'PLAYER {gracz} WINS!')
                break
        if licznik == 9:
            print('\nGAME OVER!!\n')
            print("\nIT'S A DRAW!!\n")
            break
if gracz == 'X':
gracz = 'O'
else:
gracz = 'X'
    restart = input('Do you want to play again? (y/n) ')
    if restart.lower() == 'y':
for key in Klawisze_gry:
Plansza_Do_Gry[key]= ' '
gra()
if __name__=='__main__':
gra()
``` |
{
"source": "jorg-vr/VOP",
"score": 2
} |
#### File: steps/code/links.py
```python
lookup_table = {
'start': '',
'fleets': 'fleets',
'users': 'users',
'clients': 'clients',
}
def generate_link(context, page_description):
page_description = page_description.lower()
return context.root_url + lookup_table[page_description]
``` |
{
"source": "jorgy343/condor",
"score": 3
} |
#### File: casm/parse/Operands.py
```python
class Label:
labels = {}
@classmethod
def get(cls, name):
name = str(name).replace(':', '')
if name in cls.labels:
print('returning cached label {}'.format(name))
return cls.labels[name]
else:
print('creating new label {}'.format(name))
lbl = Label(name)
cls.labels[name] = lbl
return lbl
def __init__(self, name):
self.name = str(name).replace(':', '')
self.pos = 0
def __repr__(self):
return '{}: {}'.format(self.name, self.pos)
class Directive:
def __init__(self, code):
self.code = code
class Immediate:
def __init__(self, val):
self.val = self.convert(val) if isinstance(val, str) else val
def convert(self, val):
if len(val) > 2:
pref = val[0:2]
if pref == '0x':
return int(val[2:], 16)
elif pref == '0b':
return int(val[2:], 2)
return int(val)
def __repr__(self):
return str(self.val)
class Identifier:
def __init__(self, ident):
self.val = str(ident)
def __repr__(self):
return self.val
class Register:
def __init__(self, reg):
self.num = int(reg.replace('r', ''))
def __repr__(self):
return 'r' + str(self.num)
class RegRef:
def __init__(self, reg):
self.num = int(reg.replace('r', ''))
def __repr__(self):
return '[r' + str(self.num) + ']'
``` |
{
"source": "joric/brutus",
"score": 2
} |
#### File: joric/brutus/asynhttp.py
```python
import os
import re
import socket
import string
import sys
import time
import BaseHTTPServer
# async modules
import asyncore
import asynchat
VERSION_STRING = '0.1'
from urllib import unquote, splitquery
# ===========================================================================
# Producers
# ===========================================================================
class simple_producer:
"producer for a string"
def __init__ (self, data, buffer_size=1024):
self.data = data
self.buffer_size = buffer_size
def more (self):
if len (self.data) > self.buffer_size:
result = self.data[:self.buffer_size]
self.data = self.data[self.buffer_size:]
return result
else:
result = self.data
self.data = ''
return result
class chunked_producer:
"producer for http chunked encoding"
def __init__ (self, producer, footers=None):
self.producer = producer
self.footers = footers
def more (self):
if self.producer:
data = self.producer.more()
if data:
return '%x\r\n%s\r\n' % (len(data), data)
else:
self.producer = None
if self.footers:
return string.join (
['0'] + self.footers,
'\r\n'
) + '\r\n\r\n'
else:
return '0\r\n\r\n'
else:
return ''
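# Illustration of the chunk framing produced above:
#   more() on 'hello' yields '5\r\nhello\r\n'
#   once the wrapped producer is exhausted, '0\r\n\r\n' terminates the body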
class composite_producer:
"combine a fifo of producers into one"
def __init__ (self, producers):
self.producers = producers
def more (self):
while len(self.producers):
p = self.producers[0]
d = p.more()
if d:
return d
else:
self.producers.pop(0)
else:
return ''
class globbing_producer:
"""
'glob' the output from a producer into a particular buffer size.
helps reduce the number of calls to send(). [this appears to
gain about 30% performance on requests to a single channel]
"""
def __init__ (self, producer, buffer_size=1<<16):
self.producer = producer
self.buffer = ''
self.buffer_size = buffer_size
def more (self):
while len(self.buffer) < self.buffer_size:
data = self.producer.more()
if data:
self.buffer = self.buffer + data
else:
break
r = self.buffer
self.buffer = ''
return r
class hooked_producer:
"""
A producer that will call <function> when it empties,.
with an argument of the number of bytes produced. Useful
for logging/instrumentation purposes.
"""
def __init__ (self, producer, function):
self.producer = producer
self.function = function
self.bytes = 0
def more (self):
if self.producer:
result = self.producer.more()
if not result:
self.producer = None
self.function (self.bytes)
else:
self.bytes = self.bytes + len(result)
return result
else:
return ''
# ===========================================================================
# Request Object
# ===========================================================================
class http_request:
# default reply code
reply_code = 200
request_counter = 0
# Whether to automatically use chunked encoding when
#
# HTTP version is 1.1
# Content-Length is not set
# Chunked encoding is not already in effect
#
# If your clients are having trouble, you might want to disable this.
use_chunked = 1
# by default, this request object ignores user data.
collector = None
def __init__ (self, *args):
# unpack information about the request
(self.channel, self.request,
self.command, self.uri, self.version,
self.header) = args
self.outgoing = []
self.reply_headers = {
'Server' : 'AsynHTTP/%s' % VERSION_STRING,
'Date' : self.date_time_string()
}
http_request.request_counter += 1
self.request_number = http_request.request_counter
self._split_uri = None
self._header_cache = {}
# from BaseHTTPServer.py
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def date_time_string(self, timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
self.weekdayname[wd],
day, self.monthname[month], year,
hh, mm, ss)
return s
# --------------------------------------------------
# reply header management
# --------------------------------------------------
def __setitem__ (self, key, value):
self.reply_headers[key] = value
def __getitem__ (self, key):
return self.reply_headers[key]
def has_key (self, key):
return self.reply_headers.has_key (key)
def build_reply_header (self):
return string.join (
[self.response(self.reply_code)] + map (
lambda x: '%s: %s' % x,
self.reply_headers.items()
),
'\r\n'
) + '\r\n\r\n'
# --------------------------------------------------
# split a uri
# --------------------------------------------------
# <path>;<params>?<query>#<fragment>
path_regex = re.compile (
# path params query fragment
r'([^;?#]*)(;[^?#]*)?(\?[^#]*)?(#.*)?'
)
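    # Example: '/path/doc;type=a?q=1#frag' splits into
    #   ('/path/doc', ';type=a', '?q=1', '#frag')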
def split_uri (self):
if self._split_uri is None:
m = self.path_regex.match (self.uri)
if m.end() != len(self.uri):
raise ValueError, "Broken URI"
else:
self._split_uri = m.groups()
return self._split_uri
def get_header_with_regex (self, head_reg, group):
for line in self.header:
m = head_reg.match (line)
if m.end() == len(line):
return m.group (group)
return ''
def get_header (self, header):
header = string.lower (header)
hc = self._header_cache
if not hc.has_key (header):
h = header + ': '
hl = len(h)
for line in self.header:
if string.lower (line[:hl]) == h:
r = line[hl:]
hc[header] = r
return r
hc[header] = None
return None
else:
return hc[header]
# --------------------------------------------------
# user data
# --------------------------------------------------
def collect_incoming_data (self, data):
if self.collector:
self.collector.collect_incoming_data (data)
else:
self.log_info(
'Dropping %d bytes of incoming request data' % len(data),
'warning'
)
def found_terminator (self):
if self.collector:
self.collector.found_terminator()
else:
self.log_info (
'Unexpected end-of-record for incoming request',
'warning'
)
def push (self, thing):
if type(thing) == type(''):
self.outgoing.append (simple_producer (thing))
else:
self.outgoing.append (thing)
def response (self, code=200):
short, long = self.responses[code]
self.reply_code = code
return 'HTTP/%s %d %s' % (self.version, code, short)
def error (self, code):
self.reply_code = code
message = self.responses[code][0]
s = self.DEFAULT_ERROR_MESSAGE % {
'code': code,
'message': message,
}
self['Content-Length'] = len(s)
self['Content-Type'] = 'text/html'
# make an error reply
self.push (s)
self.done()
# can also be used for empty replies
reply_now = error
def done (self):
"finalize this transaction - send output to the http channel"
# ----------------------------------------
# persistent connection management
# ----------------------------------------
# --- BUCKLE UP! ----
        connection = string.lower(self.get_header('connection') or '')
close_it = 0
wrap_in_chunking = 0
if self.version == '1.0':
if connection == 'keep-alive':
if not self.has_key ('Content-Length'):
close_it = 1
else:
self['Connection'] = 'Keep-Alive'
else:
close_it = 1
elif self.version == '1.1':
if connection == 'close':
close_it = 1
elif not self.has_key ('Content-Length'):
if self.has_key ('Transfer-Encoding'):
if not self['Transfer-Encoding'] == 'chunked':
close_it = 1
elif self.use_chunked:
self['Transfer-Encoding'] = 'chunked'
wrap_in_chunking = 1
else:
close_it = 1
elif self.version is None:
# Although we don't *really* support http/0.9 (because we'd have to
# use \r\n as a terminator, and it would just yuck up a lot of stuff)
# it's very common for developers to not want to type a version number
# when using telnet to debug a server.
close_it = 1
outgoing_header = simple_producer (self.build_reply_header())
if close_it:
self['Connection'] = 'close'
if wrap_in_chunking:
outgoing_producer = chunked_producer (
composite_producer (self.outgoing)
)
# prepend the header
outgoing_producer = composite_producer(
[outgoing_header, outgoing_producer]
)
else:
# prepend the header
self.outgoing.insert(0, outgoing_header)
outgoing_producer = composite_producer (self.outgoing)
# apply a few final transformations to the output
self.channel.push_with_producer (
# globbing gives us large packets
globbing_producer (
# hooking lets us log the number of bytes sent
hooked_producer (
outgoing_producer,
self.log
)
)
)
self.channel.current_request = None
if close_it:
self.channel.close_when_done()
def log_date_string (self, when):
gmt = time.gmtime(when)
if time.daylight and gmt[8]:
tz = time.altzone
else:
tz = time.timezone
if tz > 0:
neg = 1
else:
neg = 0
tz = -tz
h, rem = divmod (tz, 3600)
m, rem = divmod (rem, 60)
if neg:
offset = '-%02d%02d' % (h, m)
else:
offset = '+%02d%02d' % (h, m)
return time.strftime ( '%d/%b/%Y:%H:%M:%S ', gmt) + offset
def log (self, bytes):
sys.stdout.write (
'%s:%d - - [%s] "%s" %d %d\n' % (
self.channel.addr[0],
self.channel.addr[1],
self.log_date_string (time.time()),
self.request,
self.reply_code,
bytes
)
)
responses = BaseHTTPServer.BaseHTTPRequestHandler.responses
# Default error message
DEFAULT_ERROR_MESSAGE = '\r\n'.join ([
'<head>',
'<title>Error response</title>',
'</head>',
'<body>',
'<h1>Error response</h1>',
'<p>Error code %(code)d.',
'<p>Message: %(message)s.',
'</body>',
''
])
# ===========================================================================
# HTTP Channel Object
# ===========================================================================
class http_channel (asynchat.async_chat):
# use a larger default output buffer
ac_out_buffer_size = 1<<16
current_request = None
channel_counter = 0
def __init__ (self, server, conn, addr):
http_channel.channel_counter += 1
self.channel_number = http_channel.channel_counter
self.request_counter = 0
asynchat.async_chat.__init__ (self, conn)
self.server = server
self.addr = addr
self.set_terminator ('\r\n\r\n')
self.in_buffer = ''
self.creation_time = int (time.time())
self.check_maintenance()
def __repr__ (self):
ar = asynchat.async_chat.__repr__(self)[1:-1]
return '<%s channel#: %s requests:%s>' % (
ar,
self.channel_number,
self.request_counter
)
# Channel Counter, Maintenance Interval...
maintenance_interval = 500
def check_maintenance (self):
if not self.channel_number % self.maintenance_interval:
self.maintenance()
def maintenance (self):
self.kill_zombies()
# 30-minute zombie timeout. status_handler also knows how to kill zombies.
zombie_timeout = 30 * 60
def kill_zombies (self):
now = int (time.time())
for channel in asyncore.socket_map.values():
if channel.__class__ == self.__class__:
if (now - channel.creation_time) > channel.zombie_timeout:
channel.close()
# --------------------------------------------------
# send/recv overrides, good place for instrumentation.
# --------------------------------------------------
# this information needs to get into the request object,
# so that it may log correctly.
def send (self, data):
result = asynchat.async_chat.send (self, data)
self.server.bytes_out += len(data)
return result
def recv (self, buffer_size):
try:
result = asynchat.async_chat.recv (self, buffer_size)
self.server.bytes_in += len(result)
return result
except MemoryError:
# --- Save a Trip to Your Service Provider ---
# It's possible for a process to eat up all the memory of
# the machine, and put it in an extremely wedged state,
# where medusa keeps running and can't be shut down. This
# is where MemoryError tends to get thrown, though of
# course it could get thrown elsewhere.
sys.exit ("Out of Memory!")
def handle_error (self):
t, v = sys.exc_info()[:2]
if t is SystemExit:
raise t, v
else:
asynchat.async_chat.handle_error (self)
def log (self, *args):
pass
# --------------------------------------------------
# async_chat methods
# --------------------------------------------------
def collect_incoming_data (self, data):
if self.current_request:
# we are receiving data (probably POST data) for a request
self.current_request.collect_incoming_data (data)
else:
# we are receiving header (request) data
self.in_buffer = self.in_buffer + data
def found_terminator (self):
if self.current_request:
self.current_request.found_terminator()
else:
header = self.in_buffer
self.in_buffer = ''
lines = string.split (header, '\r\n')
# --------------------------------------------------
# crack the request header
# --------------------------------------------------
while lines and not lines[0]:
# as per the suggestion of http-1.1 section 4.1, (and
                # <NAME> <<EMAIL>>), ignore leading
                # blank lines (buggy browsers tack them onto the end of
# POST requests)
lines = lines[1:]
if not lines:
self.close_when_done()
return
request = lines[0]
command, uri, version = crack_request (request)
header = join_headers (lines[1:])
# unquote path if necessary (thanks to Skip Montanaro for pointing
# out that we must unquote in piecemeal fashion).
rpath, rquery = splitquery(uri)
if '%' in rpath:
if rquery:
uri = unquote (rpath) + '?' + rquery
else:
uri = unquote (rpath)
r = http_request (self, request, command, uri, version, header)
self.request_counter += 1
self.server.total_requests += 1
if command is None:
self.log_info ('Bad HTTP request: %s' % repr(request), 'error')
r.error (400)
return
# --------------------------------------------------
# handler selection and dispatch
# --------------------------------------------------
for h in self.server.handlers:
if h.match (r):
try:
self.current_request = r
# This isn't used anywhere.
# r.handler = h # CYCLE
h.handle_request (r)
except:
self.server.exceptions += 1
(file, fun, line), t, v, tbinfo = asyncore.compact_traceback()
self.log_info(
'Server Error: %s, %s: file: %s line: %s' % (t,v,file,line),
'error')
try:
r.error (500)
except:
pass
return
# no handlers, so complain
r.error (404)
def writable_for_proxy (self):
# this version of writable supports the idea of a 'stalled' producer
# [i.e., it's not ready to produce any output yet] This is needed by
# the proxy, which will be waiting for the magic combination of
# 1) hostname resolved
# 2) connection made
# 3) data available.
if self.ac_out_buffer:
return 1
elif len(self.producer_fifo):
p = self.producer_fifo.first()
if hasattr (p, 'stalled'):
return not p.stalled()
else:
return 1
# ===========================================================================
# HTTP Server Object
# ===========================================================================
class http_server (asyncore.dispatcher):
SERVER_IDENT = 'HTTP Server (V%s)' % VERSION_STRING
channel_class = http_channel
def __init__ (self, ip, port):
self.ip = ip
self.port = port
asyncore.dispatcher.__init__ (self)
self.create_socket (socket.AF_INET, socket.SOCK_STREAM)
self.handlers = []
self.set_reuse_addr()
self.bind ((ip, port))
# lower this to 5 if your OS complains
self.listen (1024)
host, port = self.socket.getsockname()
if not ip:
self.log_info('Computing default hostname', 'warning')
ip = socket.gethostbyname (socket.gethostname())
try:
self.server_name = socket.gethostbyaddr (ip)[0]
except socket.error:
self.log_info('Cannot do reverse lookup', 'warning')
self.server_name = ip # use the IP address as the "hostname"
self.server_port = port
self.total_clients = 0
self.total_requests = 0
self.exceptions = 0
self.bytes_out = 0
self.bytes_in = 0
self.log_info (
'AsynHTTP (V%s) started at %s'
'\n\tHostname: %s'
'\n\tPort:%d'
'\n' % (
VERSION_STRING,
time.ctime(time.time()),
self.server_name,
port,
)
)
def writable (self):
return 0
def handle_read (self):
pass
def readable (self):
return self.accepting
def handle_connect (self):
pass
def handle_accept (self):
self.total_clients += 1
try:
conn, addr = self.accept()
except socket.error:
# linux: on rare occasions we get a bogus socket back from
# accept. socketmodule.c:makesockaddr complains that the
# address family is unknown. We don't want the whole server
# to shut down because of this.
self.log_info ('warning: server accept() threw an exception', 'warning')
return
except TypeError:
# unpack non-sequence. this can happen when a read event
# fires on a listening socket, but when we call accept()
# we get EWOULDBLOCK, so dispatcher.accept() returns None.
# Seen on FreeBSD3.
self.log_info ('warning: server accept() threw EWOULDBLOCK', 'warning')
return
self.channel_class (self, conn, addr)
def install_handler (self, handler, back=0):
if back:
self.handlers.append (handler)
else:
self.handlers.insert (0, handler)
def remove_handler (self, handler):
self.handlers.remove (handler)
def status (self):
if self.total_clients:
            ratio = float(self.total_requests) / self.total_clients
else:
ratio = 0.0
return composite_producer ([
lines_producer (
['<h2>%s</h2>' % self.SERVER_IDENT,
'<br>Listening on: <b>Host:</b> %s' % self.server_name,
'<b>Port:</b> %d' % self.port,
'<p><ul>'
'<li>Total <b>Clients:</b> %s' % self.total_clients,
'<b>Requests:</b> %s' % self.total_requests,
'<b>Requests/Client:</b> %.1f' % ratio,
'<li>Total <b>Bytes In:</b> %s' % self.bytes_in,
'<b>Bytes Out:</b> %s' % self.bytes_out,
'<li>Total <b>Exceptions:</b> %s' % self.exceptions,
'</ul><p>'
'<b>Extension List</b><ul>',
])] + [simple_producer('</ul>')])
CONNECTION = re.compile ('Connection: (.*)', re.IGNORECASE)
# probably stuff in httplib that can do this.
# merge multi-line headers
def join_headers (headers):
r = []
for i in range(len(headers)):
if headers[i][0] in ' \t':
r[-1] = r[-1] + headers[i][1:]
else:
r.append (headers[i])
return r
def get_header (head_reg, lines, group=1):
for line in lines:
m = head_reg.match (line)
if m and m.end() == len(line):
return m.group (group)
return ''
def get_header_match (head_reg, lines):
for line in lines:
m = head_reg.match (line)
if m and m.end() == len(line):
return m
return ''
REQUEST = re.compile ('([^ ]+) ([^ ]+)(( HTTP/([0-9.]+))$|$)')
def crack_request (r):
m = REQUEST.match (r)
if m and m.end() == len(r):
if m.group(3):
version = m.group(5)
else:
version = None
return m.group(1), m.group(2), version
else:
return None, None, None
```
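For orientation, a minimal sketch of how a handler plugs into this server. The handler class, host string, and port below are illustrative assumptions; `match`, `handle_request`, `install_handler`, and the `request` API (`__setitem__`, `push`, `done`, `split_uri`) are the ones defined above.
```python
# Hypothetical handler wired into the server above (Python 2, like the module).
import asyncore

class hello_handler:
    def match (self, request):
        # claim requests for the root path only
        return request.split_uri()[0] == '/'
    def handle_request (self, request):
        request['Content-Type'] = 'text/plain'
        request.push ('hello from the async http server\r\n')
        request.done()

hs = http_server ('', 8080)            # assumed host/port, listens on all interfaces
hs.install_handler (hello_handler())
asyncore.loop()                        # run until interrupted
```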
#### File: joric/brutus/webadmin.py
```python
import re
import sys
import zlib
from urllib import splitquery
from urlparse import parse_qs
from cgi import escape
favicon = (
'AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAAAQAAAAAAAAAAAAAAAAA'
'AAAAAAD///8A////AP///wD9/f0A2uXsKbTN3FVFqeHlQqfe6mqhva1bsuLKj8Pfhu/v7w////8A'
'////AP///wD///8A////AP///wD8/f0AabXfuTat7v1lrs26V7Hc0G242LSBxN2cSqvd4E2s3d2K'
'wNKNv9LYR/z8/AH///8A////AP///wDv8/YSk7zSfkir3uJpt9i5ldToh5XU6IeV1OiHldToh5XU'
'6IeV1OiHldToh5TU54esydNh+vr6A////wD///8AYLPgxUKo3uqV1OiHldToh5XU6IeV1OiHldTo'
'h5XU6IeV1OiHldToh5XU6IeV1OiHlNTnh7jP1k////8A/Pz8ATSg2vpqtdW1kM3gipLQ44mV1OiH'
'ldToh5TU54eQzeCKlNTnh5XU6IeV1OiHjcjbjYa/0ZKSzd+G5unqGY7E4ohqsc+0PVdfzQQFBvoE'
'Bgb6OFFY0JXU6IeGwNKSAAAA/5DN4IqV1OiHWX+KtQUGBvoJDQ73UXN+vbjR2VI5pOD2WrLcyz1X'
'X81FYmvHea29mwIDA/2U1OeHhsDSkgAAAP+QzeCKjsvdjAUGB/pql6WqlNPnh4O7zJScx9R1Xq3Y'
'xXnA26Q9V1/NGiYp6Sc3PN4rPkTbldToh4bA0pIAAAD/kM3ginquvpsCAwP9lNPmh5XU6IeV1OiH'
'j8LShmGs1cB9wtygPVdfzSw+RNs7VFvPLD9F25XU6IeGwNKSAAAA/5DN4IqDu8yUAAAA/YjC1JGV'
'<KEY>'
'<KEY>'
'<KEY>'
'<KEY>'
'<KEY>'
'<KEY>////<KEY>'
'AJXH4382quv8VanQzl+028dgtNvEisnekFux2spIq97je7jPnr3R10r6+voD////AP///wD///8A'
'////AP///wD///8A7/HxD7/P10dSruDVPqbg7mSdu7NKrOHecrrirejr7Rf///8A////AP///wD/'
'//8A/B8AAOAPAADgBwAAgAMAAIABAAAAAQAAAAEAAAAAAAAAAAAAAAEAAIABAACAAQAAgAMAAOAH'
'AADwDwAA/B8AAA=='
).decode ('base64')
from __main__ import *
class handler:
def __init__ (self):
self.pending_send = []
def match (self, request):
path, params, query, fragment = request.split_uri()
if path == '/favicon.ico':
return True
else:
return path.startswith ('/admin/')
safe_cmd = re.compile ('[a-z]+')
def handle_request (self, request):
path, params, query, fragment = request.split_uri()
if path == '/favicon.ico':
request['Content-Type'] = 'image/x-icon'
request.push (favicon)
request.done()
else:
parts = path.split ('/')[2:] # ignore ['', 'admin']
subcmd = parts[0]
if not subcmd:
subcmd = 'status'
method_name = 'cmd_%s' % (subcmd,)
if self.safe_cmd.match (subcmd) and hasattr (self, method_name):
method = getattr (self, method_name)
request.push (
'\r\n'.join ([
'<html><head></head>'
'<body>'
'<h1>caesure admin</h1>',
])
)
self.menu (request)
try:
method (request, parts)
except SystemExit:
raise
except:
request.push ('<h1>something went wrong</h1>')
request.push ('<pre>%r</pre>' % (asyncore.compact_traceback(),))
request.push ('<hr>')
self.menu (request)
request.push ('</body></html>')
request.done()
else:
request.error (400)
def menu (self, request):
request.push (
' <a href="/admin/reload">reload</a>'
' <a href="/admin/status">status</a>'
' <a href="/admin/block/">blocks</a>'
' <a href="/admin/wallet/">wallet</a>'
' <a href="/admin/send/">send</a>'
' <a href="/admin/connect/">connect</a>'
' <a href="/admin/shutdown/">shutdown</a>'
)
def cmd_status (self, request, parts):
db = the_block_db
w = the_wallet
RP = request.push
RP ('<h3>last block</h3>')
RP ('hash: %s' % (db.last_block,))
RP ('<br>num: %d' % (db.block_num[db.last_block],))
if len (db.embargo):
RP ('<hr>%d blocks in embargo:' % (len(db.embargo),))
for name in db.embargo.keys():
RP ('<br>%s' % name)
RP ('<hr>')
RP ('<h3>connection</h3>')
RP (escape (repr (bc)))
try:
RP ('<br>here: %s' % (bc.getsockname(),))
RP ('<br>there: %s' % (bc.getpeername(),))
except:
            RP ('<br>no connection<br>')
RP ('<h3>wallet</h3>')
if w is None:
RP ('No Wallet')
else:
RP ('total btc: %s' % (bcrepr (w.total_btc),))
def dump_block (self, request, b):
RP = request.push
RP ('\r\n'.join ([
'<br>prev_block: %s' % (hexify (b.prev_block),),
'<br>merkle_root: %s' % (hexify (b.merkle_root),),
'<br>timestamp: %s' % (b.timestamp,),
'<br>bits: %s' % (b.bits,),
'<br>nonce: %s' % (b.nonce,),
]))
RP ('<pre>%d transactions\r\n' % len(b.transactions))
for tx in b.transactions:
self.dump_tx (request, tx)
RP ('</pre>')
def cmd_block (self, request, parts):
db = the_block_db
RP = request.push
if len(parts) == 2:
if parts[1] == 'embargo':
if len(db.embargo):
for name, block in db.embargo.iteritems():
RP ('<hr>%s' % (name,))
self.dump_block (request, unpack_block (block))
else:
RP ('<h3>no blocks in embargo</h3>')
return
elif len(parts[1]):
num = int (parts[1])
else:
num = 0
else:
num = 0
if db.num_block.has_key (num):
b = db[db.num_block[num]]
last_num = db.block_num[db.last_block]
RP ('<br> <a href="/admin/block/0">First Block</a>')
RP (' <a href="/admin/block/%d">Last Block</a><br>' % last_num,)
RP (' <a href="/admin/block/embargo">Embargo</a>')
if num > 0:
RP (' <a href="/admin/block/%d">Prev Block</a>' % (num-1,))
if num < db.block_num[db.last_block]:
RP (' <a href="/admin/block/%d">Next Block</a><br>' % (num+1,))
self.dump_block (request, b)
def dump_tx (self, request, tx):
RP = request.push
RP ('tx: %s\r\n' % (hexify (dhash (tx.render()))))
RP ('inputs: %d\r\n' % (len(tx.inputs)))
for i in range (len (tx.inputs)):
(outpoint, index), script, sequence = tx.inputs[i]
RP ('%3d %s:%d %s %d\r\n' % (i, hexify(outpoint), index, hexify (script), sequence))
RP ('%d outputs\n' % (len(tx.outputs)))
for i in range (len (tx.outputs)):
value, pk_script = tx.outputs[i]
addr = parse_oscript (pk_script)
if not addr:
addr = hexify (pk_script)
RP ('%3d %s %s\n' % (i, bcrepr (value), addr))
RP ('lock_time: %s\n' % tx.lock_time)
def cmd_reload (self, request, parts):
new_hand = reload (sys.modules['webadmin'])
hl = sys.modules['__main__'].h.handlers
for i in range (len (hl)):
if hl[i] is self:
del hl[i]
h0 = new_hand.handler()
# copy over any pending send txs
h0.pending_send = self.pending_send
hl.append (h0)
break
request.push ('<h3>[reloaded]</h3>')
self.cmd_status (request, parts)
def cmd_wallet (self, request, parts):
RP = request.push
w = the_wallet
if not w:
RP ('<h3>no wallet</h3>')
else:
if parts == ['wallet', 'newkey']:
nk = w.new_key()
RP ('<p>New Key: %s</p>' % (nk,))
else:
addrs = w.value.keys()
addrs.sort()
sum = 0
RP ('<p>%d addrs total</p>' % (len(addrs),))
for addr in addrs:
RP ('<dl>')
if len(w.value[addr]):
RP ('<dt>addr: %s</dt>' % (addr,))
for (outpoint, index), value in w.value[addr].iteritems():
RP ('<dd>%s %s:%d</dd>' % (bcrepr (value), outpoint.encode ('hex'), index))
sum += value
RP ('</dl>')
RP ('<br>total: %s' % (bcrepr(sum),))
RP ('<br>unused keys:')
for addr in addrs:
if not len(w.value[addr]):
RP ('<br>%s' % (addr,))
RP ('<p><a href="/admin/wallet/newkey">Make a New Key</a></p>')
def match_form (self, qparts, names):
if len(qparts) != len(names):
return False
else:
for name in names:
if not qparts.has_key (name):
return False
return True
def cmd_connect (self, request, parts):
path, params, query, fragment = request.split_uri()
RP = request.push
if query:
qparts = parse_qs (query[1:])
if self.match_form (qparts, ['host']):
global bc
if bc:
bc.close()
bc = connection (qparts['host'][0])
RP ('<form>'
'IP Address: <input type="text" name="host" value="127.0.0.1"/><br/>'
'<input type="submit" value="Connect"/></form>')
def cmd_send (self, request, parts):
path, params, query, fragment = request.split_uri()
RP = request.push
w = the_wallet
if query:
qparts = parse_qs (query[1:])
if self.match_form (qparts, ['amount', 'addr', 'fee']):
btc = float_to_btc (float (qparts['amount'][0]))
fee = float_to_btc (float (qparts['fee'][0]))
addr = qparts['addr'][0]
try:
_ = address_to_key (addr) # verify it's a real address
except:
RP ('<br><h3>Bad Address: %r</h3>' % escape (addr),)
else:
tx = w.build_send_request (btc, addr, fee)
RP ('<br>send tx:<br><pre>')
self.dump_tx (request, tx)
self.pending_send.append (tx)
RP ('</pre>')
elif self.match_form (qparts, ['cancel', 'index']):
index = int (qparts['index'][0])
del self.pending_send[index]
RP ('<h3>deleted tx #%d</h3>' % (index,))
elif self.match_form (qparts, ['confirm', 'index']):
index = int (qparts['index'][0])
tx = self.pending_send[index]
RP ('<h3>sent tx #%d</h3>' % (index,))
# send it
bc.push (make_packet ('tx', tx.render()))
# forget about it
del self.pending_send[index]
else:
RP ('???')
RP ('<form>'
'Amount to Send: <input type="text" name="amount" /><br/>'
'To Address: <input type="text" name="addr" /><br/>'
'Fee: <input type="text" name="fee" value="0.0005"><br/>'
'<input type="submit" value="Send"/></form>'
'<p>Clicking "Send" will queue up the send request, where it can be examined and either confirmed or cancelled</p>'
'<p>Note: as currently designed, the bitcoin network may not forward transactions without fees, which could result in bitcoins being "stuck". Sending tiny amounts (less than 0.01) requires a fee. This includes the amount left in "change"!</p>'
)
if not self.pending_send:
RP ('<h3>no pending send requests</h3>')
else:
RP ('<h3>pending send requests</h3>')
for i in range (len (self.pending_send)):
RP ('<hr>#%d: <br>' % (i,))
RP ('<pre>')
self.dump_tx (request, self.pending_send[i])
RP ('</pre>')
RP ('<form><input type="hidden" name="index" value="%d">'
'<input type="submit" name="confirm" value="confirm"/>'
'<input type="submit" name="cancel" value="cancel"/>'
'</form>' % (i,))
def cmd_shutdown (self, request, parts):
request.push ('<h3>Shutting down...</h3>')
if the_wallet:
the_wallet.write_value_cache()
import os
os._exit (os.EX_OK)
``` |
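A note on wiring: `cmd_reload` above walks `sys.modules['__main__'].h.handlers`, so the main script is expected to own an `http_server` instance named `h` and mount this handler on it. A sketch under that assumption:
```python
# Assumed main-script wiring implied by cmd_reload (the names are not in this file).
import webadmin
h.install_handler (webadmin.handler())   # h: the http_server owned by __main__
```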
{
"source": "JorickvdHoeven/data-registration-tool",
"score": 3
} |
#### File: data-registration-tool/drt/create_environment.py
```python
__all__ = ['create_data_model', 'create_folders', 'create_config', 'create_environment']
# Cell
#export
import sqlalchemy as db
from pathlib import Path
import shutil
import configparser
import drt.data_model as dm
# Cell
def create_data_model( file_name:str = None, location:Path = None):
"""
Creates an SQLite database with the appropriate tables for use in
managing the data intake process.
This will not destroy a pre-existing database file.
##### Parameters
file_name : str, optional
Name of the database file defaults to 'intake_db'
location : Path, optional
Location where to save the db file, by default current working directory
"""
if location is None:
location = Path.cwd()
if file_name is None or file_name == '':
file_name = 'intake_db'
(location / '.config').mkdir(parents=True, exist_ok=True)
db_path = location / '.config' / f'{file_name}.sqlite'
engine = db.create_engine(f'sqlite:///{db_path}', pool_pre_ping=True)
    if db_path.exists():
        # db already exists; verify the schema and return its path
        # TODO test that the sqlite database has the right schema if not, raise error
with engine.connect() as conn:
session = db.orm.Session(bind=conn)
session.query(dm.Data_Group).all()
return db_path
dm.Base.metadata.create_all(engine)
return db_path
# Cell
def create_folders(location:Path=None):
"""
Create the folder structure to accept data. Also populate a
helpful readme with instructions on how to use the data intake
process.
##### Parameters
location : Path, optional
The location where to create the folder structure, by default current working directory
"""
if location is None:
location = Path.cwd()
elif not location.exists():
location.mkdir(parents=True)
delivery = '01_Delivery'
raw = '02_RAW'
dataset = '03_Datasets'
(location / delivery).mkdir(exist_ok=True)
(location / raw).mkdir(exist_ok=True)
(location / dataset).mkdir(exist_ok=True)
parent = Path(__file__).resolve().parent
shutil.copy(str(parent / 'templates' / 'data_intake_readme.md'), str(location / 'readme.md'))
return {
'delivery': (location / delivery),
'raw': (location / raw),
'datasets': (location / dataset),
}
# Cell
def create_config(location:Path, db_path:Path, folders:dict) -> Path:
"""
Create a new default configuration file with the paths set to the
default paths.
##### Parameters
location : Path
Location of data intake project
db_path : Path
Location of the data intake database
folders : dict
Dictionary with the folder paths for 'delivery', 'raw', and 'datasets'
"""
if location is None:
location = Path.cwd()
cfg = configparser.ConfigParser()
d = dict()
d['root_data'] = location
d['delivery_folder'] = folders['delivery']
d['raw_data_folder'] = folders['raw']
d['datasets_folder'] = folders['datasets']
d['data_intake_db'] = db_path
cfg['PATHS'] = d
d = dict()
d['force_recalculate'] = False
cfg['FLAGS'] = d
d = dict()
d['data_extensions'] = '\n'.join(["data", "parquet", "hdf5"])
d['report_extensions'] = '\n'.join(['report','md','html','pptx','docx'])
d['script_extensions'] = '\n'.join(['script', 'ipynb', 'py', 'r', 'jl', 'sh'])
cfg['EXTENSIONS'] = d
if not (location / '.config' / 'config.ini').is_file():
with open((location / '.config' / 'config.ini'), mode='w') as f:
cfg.write(f)
else:
print('[!] config.ini already exists, using existing version')
return (location / '.config' / 'config.ini')
# Cell
def create_environment(location:Path = None) -> Path:
"""
Stands up a data intake environment at the given location.
##### Parameters
location : Path, optional
Location to create the environment, defaults to current working directory.
"""
if location is None:
location = Path.cwd()
(location / '.config').mkdir(parents=True, exist_ok=True)
db_path = create_data_model(location=location)
folders = create_folders(location=location)
return create_config(location, db_path, folders)
```
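A short usage sketch (the target path is illustrative): stand up an environment and read back the generated configuration.
```python
# Example: create an intake environment and inspect its config.
import configparser
from pathlib import Path

config_path = create_environment(Path('/tmp/intake_demo'))
cfg = configparser.ConfigParser()
cfg.read(config_path)
print(cfg['PATHS']['data_intake_db'])   # .config/intake_db.sqlite under the root
```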
#### File: data-registration-tool/drt/registration.py
```python
__all__ = ['extract_delivery_from_script', 'register_data_group', 'register_data_folder', 'register_all']
# Cell
from .environment import DataIntakeEnv
import drt.utils as utils
import drt.receipt as rct
import itertools
import drt.data_model as dm
from pathlib import Path
from typing import Union
from .utils import Data_Groups_Type
# Cell
def extract_delivery_from_script(env:DataIntakeEnv, folder:Path) -> dm.Delivery:
    """
    Find the delivery a data group was generated from by searching the
    scripts in its folder for the name of a registered delivery.
    ##### Parameters
    env : DataIntakeEnv
        The application environment settings.
    folder : Path
        The folder whose scripts should be searched for a delivery name.
    ##### Returns
    dm.Delivery
        The single registered delivery referenced by the scripts, or None
        if no source or more than one source was found.
    """
# load script data
# get all registered delivery names
# search for delivery names in script
# return the delivery name which is the source
# error if there is more than one source
files = [fil for fil in folder.rglob("*")
if fil.suffix[1:] in env.script_extension_list
and fil.is_file()
]
text = ''
for fil in files:
with open(fil, mode='rt') as f:
text = text + f.read()
sources = []
for delivery in env.session.query(dm.Delivery).all():
if delivery.name in text:
sources.append(delivery)
if len(sources) > 1:
print(f"[!!] Error loading source for {folder}, too many sources" )
return None
elif len(sources) == 0:
print(f"[!!] Error no registered source found for {folder}")
return None
else:
return sources[0]
# Cell
def register_data_group(env:DataIntakeEnv, folder:Path, group_type: Data_Groups_Type, record: Data_Groups_Type = None):
"""
Register a single data group to the database.
##### Parameters
env : DataIntakeEnv
The application environment settings.
folder : Path
The full path to the folder of the data group to register. Relative paths won't work.
    group_type : Data_Groups_Type
        The data group class (dm.Delivery, dm.Raw_Data or dm.Dataset), used to pick the proper table.
    record : dm.Data_Group, optional
        If this is a pre-existing record and you have it, provide it here, by default None
    ##### Raises
    TypeError
        group_type must be one of dm.Delivery, dm.Raw_Data or dm.Dataset
"""
dg = group_type
if not record:
record = env.session.query(dg).filter_by(name = folder.name).first()
try:
data = utils.process_data_group(folder, dg)
except FileNotFoundError:
print(f"[!] {folder} not processed, empty or non-existent")
return False
if group_type == dm.Raw_Data or type(record) == dm.Raw_Data:
raw_data_source = extract_delivery_from_script(env, folder)
# Set the system fields based on the metadata collected
if record:
# we have an existing record create it
dg = record
dg.type = data['type']
dg.name = data['name']
dg.last_update = data['last_update']
dg.size = data['size']
dg.num_files = data['num_files']
dg.group_hash= data['group_hash']
dg.group_last_modified= data['group_last_modified']
if type(record) == dm.Raw_Data:
dg.source = raw_data_source
# we need to create a new record
elif group_type == dm.Delivery:
dg = dm.Delivery(**data)
env.session.add(dg)
elif group_type == dm.Raw_Data:
dg =dm.Raw_Data(**data, source=raw_data_source)
env.session.add(dg)
elif group_type == dm.Dataset:
dg = dm.Dataset(**data)
env.session.add(dg)
else:
raise TypeError
env.session.commit()
return True
# Cell
def register_data_folder(env:DataIntakeEnv, group_type: Data_Groups_Type, force:bool = False):
"""
Scans a folder containing data groups and registers them if they
don't already exist in the database.
##### Parameters
env : DataIntakeEnv
The environment with data intake process pathnames.
    group_type : Data_Groups_Type
        The type of folder to scan: dm.Delivery, dm.Raw_Data, or dm.Dataset.
force : bool, optional
If we force we ignore the current data and regenerate all stats. This will
overwrite previous stats.
##### Raises
ValueError
The right type must be passed.
"""
# get folder list, if not delivery folder then skip "In_Progress_*"
if group_type == dm.Delivery:
root_folder = env.delivery_folder
folder_list = [fil
for fil in root_folder.iterdir()
if fil.is_dir() and not fil.name.startswith(".")]
elif group_type == dm.Raw_Data:
root_folder = env.raw_data_folder
folder_list = [fil
for fil in root_folder.iterdir()
if not (fil.name.startswith(".") or fil.name.startswith("In_Progress"))
and fil.is_dir()]
    elif group_type == dm.Dataset:
root_folder = env.dataset_folder
folder_list = [fil
for fil in root_folder.iterdir()
if not (fil.name.startswith(".") or fil.name.startswith("In_Progress"))
and fil.is_dir()]
else:
raise ValueError
# Process new folders and add them to the database.
data_group_list = env.get_data_group_list(group_type)
known_folders = [ item.name for item in data_group_list ]
new_folders = list(set([f.name for f in folder_list]) - set(known_folders))
process_folders = list(zip(new_folders, itertools.repeat(None)))
if force:
process_folders.extend([(item.name, item) for item in data_group_list if (root_folder / item.name).is_dir()])
for folder, record in process_folders:
folder = root_folder / folder
registered = register_data_group(env, folder, group_type, record)
if force:
rct.write_receipt(env, folder)
elif registered:
rct.sync_data_group(env, folder)
rct.write_receipt(env, folder)
# Cell
def register_all(env:DataIntakeEnv):
"""
Register all new data groups in the data intake process environment.
This on purpose, ignores In_Progress and . files.
##### Parameters
env : DataIntakeEnv
The data intake process environment to scan and register new data groups.
"""
[ register_data_folder(env, i, env.force_recalculate) for i in [dm.Delivery, dm.Raw_Data, dm.Dataset] ]
```
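A usage sketch mirroring the pattern in drt/tests.py below: build the environment object from the config produced by create_environment and run a full registration pass. The path is illustrative.
```python
# Example: register every new data group in an existing environment.
from pathlib import Path

env = DataIntakeEnv(Path('/tmp/intake_demo') / '.config' / 'config.ini')
register_all(env)   # scans the delivery, raw data, and dataset folders
```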
#### File: data-registration-tool/drt/tests.py
```python
__all__ = ['create_test_folder']
# Cell
#export
from nbdev.showdoc import *
from .create_environment import *
from .environment import DataIntakeEnv
from .registration import register_all
from .receipt import sync_data_folder
from .verification import check_datagroup
import drt.data_model as dm
from pathlib import Path
import shutil
import drt
import re
from zipfile import ZipFile
try:
import importlib.resources as pkg_resources
except ImportError:
# Try backported to PY<37 `importlib_resources`.
import importlib_resources as pkg_resources
# Cell
def create_test_folder(test_path:Path):
"""
Create a folder in a location with dummy data ready to be registered
"""
if test_path.exists() and test_path.is_dir():
shutil.rmtree(test_path)
create_environment(test_path)
# TODO : Turn this into a zip extraction to avoid interference
(test_path/'01_Delivery').rmdir()
(test_path/'02_RAW').rmdir()
(test_path/'03_Datasets').rmdir()
with pkg_resources.path(drt, 'test_files') as test:
with ZipFile(test/'zipped_test.zip', 'r') as zipObj:
zipObj.extractall(test_path)
# shutil.copytree((test/'01_Delivery'), (test_path / '01_Delivery'),dirs_exist_ok=True)
# shutil.copytree((test/'02_RAW'), (test_path / '02_RAW'),dirs_exist_ok=True)
# shutil.copytree((test/'03_Datasets'), (test_path / '03_Datasets'),dirs_exist_ok=True)
env = DataIntakeEnv(test_path / '.config' / 'config.ini')
register_all(env)
for data_folder in test_path.iterdir():
if data_folder.is_dir():
for data_group in data_folder.iterdir():
if data_group.name[0] not in ['.','_']:
if data_folder.name == '01_Delivery':
if (data_group/'receipt.rst').exists():
with open(data_group/'receipt.rst', 'rt') as recpt:
text = recpt.read()
                            text = re.sub('(.*\n:Date Received: )None(.*)', '\\1 2020-01-04\\2', text, flags=re.MULTILINE)
                            text = re.sub('(.*\n:Received from: )None(.*)', '\\1Dummy Recipient\\2', text, flags=re.MULTILINE)
                            text = re.sub('(.*\nDescription:\n-+\n)None(.*)', '\\1Lorem ipsum Dolor sit Amet\\2', text, flags=re.MULTILINE)
with open(data_group/'receipt.rst', 'wt') as recpt:
recpt.write(text)
else:
if (data_group/'receipt.rst').exists():
with open(data_group/'receipt.rst', 'rt') as recpt:
text = recpt.read()
                            text = re.sub('(.*\nDescription:\n-+\n)None(.*)', '\\1Lorem ipsum Dolor sit Amet\\2', text, flags=re.MULTILINE)
                            text = re.sub('(.*\nReport\n-+\n)None(\n.*)', '\\1Dummy HTML Report\\2', text, flags=re.MULTILINE)
with open(data_group/'receipt.rst', 'wt') as recpt:
recpt.write(text)
[sync_data_folder(env,group_type ) for group_type in [dm.Delivery, dm.Raw_Data, dm.Dataset] ]
```
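Typical use (the path is illustrative): the call wipes and recreates the target folder, so point it at a disposable location.
```python
# Example: build a disposable, pre-registered test environment.
from pathlib import Path

create_test_folder(Path('/tmp/drt_test'))
```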
#### File: data-registration-tool/drt/utils.py
```python
__all__ = ['Data_Groups_Type', 'hash_files', 'process_data_group', 'count_data_group_components']
# Cell
#export
from pathlib import Path
from typing import List
import hashlib
from tqdm.auto import tqdm
from datetime import datetime
import pandas as pd
import numpy as np
import drt.data_model as dm
import typing
# Cell
Data_Groups_Type = typing.Union[dm.Delivery, dm.Raw_Data, dm.Dataset]
# Cell
def hash_files(file_list: List[Path],
block_size: int = 10_485_760,
progressbar_min_size: int = 10_737_400_000) -> str:
"""
Takes a list of path objects and returns the SHA256 hash of
the files in the list. If any of the objects are not file objects,
this will crash. Ignores any files called 'receipt.rst' as those are
considered data intake files and not part of the work.
##### Parameters
    __file_list__ : List[Path]
        List of paths denoting files to be hashed; the entries
        must all be valid files or this method will raise an
        exception.
__block_size__ : int, optional
Block size in bytes to read from disk a good generic
value is 10MB as most files are smaller than 10MB and
it means we can load whole files in at a time when
hashing, defaults to 10_485_760 (10MB).
__progressbar_min_size__ : int, optional
Minimum size a file needs to be to get its
own progress bar during processing. Default was chosen
to work well on an SSD reading 400 MB/s, defaults
to 10_737_400_000 (10GB).
##### Returns
__str__
A string representation of the SHA256 hash of the files
provided in the file list.
##### Raises
    __FileNotFoundError__
        Raised when an entry in file_list does not exist in the filesystem.
    __ValueError__
        Raised when an entry in file_list is a directory rather than a file.
        Currently only windows and Posix filesystems are supported. This may
        change in future.
"""
# sort the file list to always give a consistent result. Order matters.
file_list = sorted(file_list)
file_list_hash = hashlib.sha256() # Create the hash object for the list
# loop through all the files in the list updating the hash object
for fil in tqdm(file_list, leave=False, unit="files"):
file_progress_bar = False
        if not Path(fil).exists():
            raise FileNotFoundError("Entries in the file_list must be valid paths in the filesystem")
        elif Path(fil).is_dir():
            raise ValueError("Entries in the file_list must be valid files, not folders")
size = fil.stat().st_size
if size > progressbar_min_size:
file_progress_bar = True
pbar = tqdm(total=fil.stat().st_size, unit="bytes", unit_scale=True, leave=False)
else:
pbar = [] # else only here to get rid of unbound warning
# Read data from file in block_size chunks and update the folder hash function
with open(fil, "rb") as f:
fb = f.read(block_size)
while len(fb) > 0:
file_list_hash.update(fb) # Update the file list hash
fb = f.read(block_size) # Read the next block from the file
if file_progress_bar:
pbar.update(block_size)
if file_progress_bar:
pbar.close()
return file_list_hash.hexdigest()
# Cell
def process_data_group(folder:Path, type:Data_Groups_Type, light:bool = False) -> dict:
"""
Return the system fields for a data group folder.
If the data group is a delivery type, then this only looks at the
data folder in it, if it is any other type it looks at the whole folder.
##### Parameters
folder : Path
The location to get metadata for.
    type : Data_Groups_Type
        The data group class: dm.Delivery, dm.Raw_Data, or dm.Dataset.
light : bool, optional
If set skip the hashing
##### Returns
dict
A dict of the following five metadata elements calculated:
- name : Name of the folder of the data group
- type : The type of the data group processed
- last_update : The current date and time
- size : The size of the data on disk
- num_files : The number of data files.
- group_hash : A SHA256 hash of all the data in the folder
- group_last_modified : The maximum date of created, and modified for all files
"""
if type == dm.Delivery:
data_folder = folder / 'data'
else:
data_folder = folder
# check for non-existent or empty folder
if not data_folder.exists():
raise FileNotFoundError
try:
next((data_folder).glob("**/*"))
except StopIteration:
# folder is empty can't process it
raise FileNotFoundError
# Get file sizes, last modified dates, and names to count,
# sum size, and hash the file data provided
file_sizes, file_modified_dates, file_metamodified_dates, file_names = zip(
*[
(f.stat().st_size, f.stat().st_mtime, f.stat().st_ctime, f)
for f in (data_folder).glob("**/*")
if f.is_file() and f.name != 'receipt.rst'
]
)
last_modified = datetime.fromtimestamp(
max(max(file_modified_dates),
max(file_metamodified_dates)))
# Hash the files in the delivery
if light:
folder_hash = 'skipped'
else:
folder_hash = hash_files(file_names)
dg = {
'name' : folder.name,
'type' : type.__name__,
'last_update' : datetime.now(),
'size' : sum(file_sizes),
'num_files' : len(file_sizes),
'group_hash' : folder_hash,
'group_last_modified' : last_modified,
}
return dg
# Cell
def count_data_group_components( data_group: Path,
data_extensions: list,
report_extensions: list,
script_extensions: list,
):
"""
A utility method to analyze a folder to determine which data
it contains and whether those have the three requisite elements,
generation script, data, and report. It relies on certain
conventions about the folder which must be followed:
1. Each data respresentation is stored in a folder, files in
the root of the passed folder will be ignored.
2. Folders starting with "In_Progress" or "." will be ignored.
3. In each data representation folder there are three entries
more won't cause an error but should be avoided
4. Report types have extensions:
['report','md','html','pptx','docx', ...]
with the initial report extension added to a folder containing
report files if there is more than 1 report file needed.
5. Data types have extensions:
['data','parquet','hdf5', ...]
with the initial data extension being used for folders in the
declaration which allows the data to be spread over multiple
files.
6. Script types have extensions:
['script','ipynb','py','r','jl','sh', ...]
Where the first extension can be applied to a folder if more
than one file was needed to process the data.
This analyzer will look only for the extensions listed and report
how many of each of the types of files/folders exist in the root
of the provided folder.
##### Parameters
    data_group : Path
        A folder containing folders of data representations
    data_extensions : list
        File extensions counted as data elements.
    report_extensions : list
        File extensions counted as report elements.
    script_extensions : list
        File extensions counted as script elements.
    ##### Returns
    dict
        A count of the data, report, and script elements which appear
        in the root of the provided folder.
element_count ={
'data':0,
'report':0,
'script':0
}
# For each Raw data file extract count the number of each data elements it has
for fil in data_group.iterdir():
if not fil.name.startswith('.'):
            if fil.suffix[1:] in data_extensions:
                element_count['data'] += 1
            if fil.suffix[1:] in report_extensions:
                element_count['report'] += 1
            if fil.suffix[1:] in script_extensions:
                element_count['script'] += 1
return element_count
``` |
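A sketch of the two main entry points; the folder path is an assumption, and light=True is used so the example skips the (potentially slow) hashing step.
```python
# Example: hash a data folder and collect its metadata.
from pathlib import Path
import drt.data_model as dm

folder = Path('/tmp/intake_demo/01_Delivery/first_delivery')
digest = hash_files([f for f in (folder / 'data').glob('**/*') if f.is_file()])
meta = process_data_group(folder, dm.Delivery, light=True)
print(meta['name'], meta['num_files'], meta['group_hash'])   # group_hash == 'skipped'
```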
{
"source": "joricomico/COVID19_trial",
"score": 2
} |
#### File: joricomico/COVID19_trial/core_legacy.py
```python
_DEBUG = 0
from inspect import isfunction, ismethod, isgeneratorfunction, isgenerator, isroutine
from inspect import isabstract, isclass, ismodule, istraceback, isframe, iscode, isbuiltin
from inspect import ismethoddescriptor, isdatadescriptor, isgetsetdescriptor, ismemberdescriptor
from inspect import isawaitable, iscoroutinefunction, iscoroutine
from datetime import timedelta as _time
from datetime import datetime
from collections.abc import Iterable as iterable
from pickle import dump, load
def some(field):
''' returns True if value is not None or pointing to an empty set; therefore 0, True and False return True '''
return field is not None and field != [] and field != {} and field != () and field != ''
def no(field):
''' returns False if value is not None or pointing to an empty set; therefore 0, True and False return False '''
return not some(field)
class clonable:
def __init__(clonable, **sets): clonable.__dict__.update(sets)
def _clonable(get): return get.__dict__.copy()
def _meta(data): return data._clonable()
def clone(_): return type(_)(**_._clonable())
def set(object, **fields):
for field in fields: setattr(object, field, fields[field])
@property
def sets(of): return sorted(list(set(dir(of)) - set(dir(type(of)))))
class gcomparable:
def _compare(a, b):
if type(a) != type(b): return False
if a.__dict__ == b.__dict__: return True
return False
def __eq__(a, b): return a._compare(b)
class gprintable:
_lines_ = 31
_chars_ = 13
_ellipsis_ = '...'
def _repr(my, value):
_type = ''.join(''.join(str(type(value)).split('class ')).split("'"))
_value = '{}'.format(value)
if len(_value)>my._chars_:
show = int(my._chars_/2)
_value = _value[:show]+my._ellipsis_+_value[-show:]
return '{} {}'.format(_type, _value)
class struct(clonable, gcomparable, gprintable):
@staticmethod
def _from(data):
if value(data).inherits(struct): return struct(**data._clonable())
elif hasattr(data, '__dict__'): return struct(**data.__dict__)
return value(data)
def _default(field, name, value):
try: return getattr(field, name)
except: setattr(field, name, value)
return value
def all(object, *fields): return [getattr(object, field) for field in fields if field in object.__dict__]
def get(object, field):
if field in object.sets: return getattr(object, field)
return None
def _check(these, args, by=lambda x:x):
def match(this, item, value): return item in this.sets and by(this.get(item)) == value
return all([match(these, _, args[_]) for _ in args])
def clear(_, *fields):
if no(fields): fields = _.sets
for field in [field for field in fields if hasattr(_,field) and not ismethod(getattr(_, field))]: delattr(_, field)
@property
def tokens(_): return ((k,_.get(k)) for k in _.sets)
def __repr__(self):
if not hasattr(self, '_preprint'): return struct(_preprint='', _lines=self._lines_, data=self).__repr__()
pre, repr = self._preprint, ''
for n,i in ni(self.data):
if self._lines == 0: break
else: self._lines -= 1
repr += pre+'{}: '.format(n)
if issubclass(type(i), struct): repr += '\n'+struct(_preprint=pre+'\t', _lines=self._lines, data = i).__repr__()
else: repr += self._repr(i)
repr += '\n'
return repr
class recordable(clonable):
@staticmethod
def load(filename):
with open(filename, 'rb') as file: return load(file)
def _to_base(_, value): return value
def _parse_to_base(_):
clonable = _._clonable()
for field in clonable:
            if issubclass(type(clonable[field]), recordable): clonable[field] = clonable[field]._parse_to_base()
else: clonable[field] = _._to_base(clonable[field])
return type(_)(**clonable)
def _predump(_): pass
def save(data, filename, to_base=False):
if to_base: _ = data._parse_to_base()
data._predump()
with open(filename, 'wb') as file: dump(data, file)
class value(recordable, gcomparable, gprintable):
data = None
_check = dict(
isfunction=isfunction, ismethod=ismethod, isgeneratorfunction=isgeneratorfunction, isgenerator=isgenerator, isroutine=isroutine,
isabstract=isabstract, isclass=isclass, ismodule=ismodule, istraceback=istraceback, isframe=isframe, iscode=iscode, isbuiltin=isbuiltin,
ismethoddescriptor=ismethoddescriptor, isdatadescriptor=isdatadescriptor, isgetsetdescriptor=isgetsetdescriptor, ismemberdescriptor=ismemberdescriptor,
isawaitable=isawaitable, iscoroutinefunction=iscoroutinefunction, iscoroutine=iscoroutine
)
def __init__(this, token, **meta):
this.data = token
this.__dict__.update({k:v(token) for k,v in this._check.items()})
super().__init__(**meta)
@property
def type(_): return type(_.data)
def inherits(_, *types): return issubclass(_.type, types)
@property
def isstruct(_): return _.inherits(struct)
@property
def isbaseiterable(_): return _.inherits(tuple, list, dict, set) or _.isgenerator or _.isgeneratorfunction
@property
def isiterable(_): return isinstance(_.data, iterable) and _.type is not str
def _clone_iterable(_):
if _.inherits(dict): return _.data.copy()
elif _.isgenerator or _.isgeneratorfunction: return (i for i in list(_.data))
else: return type(_.data)(list(_.data)[:])
def _clonable(_): return {k:v for k,v in _.__dict__.items() if k not in _._check}
def _meta(data): return {k:v for k,v in data._clonable().items() if k != 'data'}
def clone(_):
data = _.data
if _.isiterable: data = _._clone_iterable()
elif _.inherits(clonable): data = _.data.clone()
return type(_)(data)
def __enter__(self): self._instance = self; return self
def __exit__(self, type, value, traceback): self._instance = None
def __repr__(self):
if not hasattr(self, '_preprint'): return value(self.data, _preprint='', _lines=value(value._lines_)).__repr__()
if self.isbaseiterable:
pre, repr = self._preprint, ''
for n,i in ni(self.data):
if self._lines.data == 0: break
else: self._lines.data -= 1
index, item = str(n), i
if self.inherits(dict): index += ' ({})'.format(str(i)); item = self.data[i]
repr += pre+'{}: '.format(index)
next = value(item, _preprint=pre+'\t', _lines=self._lines)
if next.isiterable: repr += '\n'
repr += next.__repr__()
repr += '\n'
return repr
elif self.inherits(clonable): return value(self.data._clonable(), _preprint=self._preprint, _lines=self._lines).__repr__()
else: return self._repr(self.data)
this = value
def meta(data):
if this(data).inherits(clonable): return data._meta()
return struct._from(data)._meta()
def get(opt, key, default=None, share=False):
if key in opt:
if not share: return opt.pop(key)
return opt[key]
return default
def ni(list):
if this(list).isiterable:
for n,i in enumerate(list): yield n,i
elif this(list).isstruct:
for n,i in list.tokens: yield n,i
else: yield None, list
class at(struct):
DAY, HOUR, MIN = 86400, 3600, 60
def __init__(_, dtime=None, **sets):
super().__init__(**sets)
if some(dtime) and issubclass(type(dtime), _time): _._time = dtime
else:
d,h,m,s,ms = _._default('d',0), _._default('h',0), _._default('m',0), _._default('s',0), _._default('ms',0)
if not any([d,h,m,s,ms]): now=datetime.now(); _._time = now-datetime(now.year, now.month, now.day)
else: _._time = _time(days=d, hours=h, minutes=m, seconds=s, milliseconds=ms)
_.clear('d','h','m','s','ms')
def __sub__(_, dtime):
of=type(dtime); sets=_._clonable()
if issubclass(of, _time): return at(_._time-dtime, **sets)
elif issubclass(of, at): sets.update(dtime._clonable()); return at(_._time-dtime._time, **sets)
def __add__(_, dtime):
of=type(dtime); sets=_._clonable()
if issubclass(of, _time): return at(_._time+dtime, **sets)
elif issubclass(of, at): sets.update(dtime._clonable()); return at(_._time+dtime._time, **sets)
def __str__(_): return str(_._time)
@property
def seconds(_): return _._time.seconds
@property
def S(_): return _.seconds
@property
def minutes(_): return _._time.seconds/60
@property
def M(_): return _.minutes
@property
def hours(_): return _.minutes/60
@property
def H(_): return _.hours
@property
def days(_): return _._time.days
@property
def D(_): return _.days
@staticmethod
def zero(): return at(_time())
if _DEBUG:
print(some(None), some(0), some(True), some(False), some([]), some(tuple()))
print(no(None), no(0), no(True), no(False), no([]), no(tuple()))
``` |
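A few illustrative uses of the primitives above, following their definitions:
```python
# struct: dynamic fields, cloning, and equality by __dict__.
s = struct(a=1, b=struct(c=2))
print(s.sets)                  # ['a', 'b']
print(s.get('a'))              # 1
print(s == s.clone())          # True

# value: introspection wrapper around an arbitrary token.
v = value([1, 2, 3])
print(v.isiterable)            # True
print(v.clone().data)          # [1, 2, 3] (a copy, not the same list)

# at: timedelta wrapper built from keyword fields.
print(at(h=1, m=30).minutes)   # 90.0
```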
{
"source": "JorijnInEemland/MyPythonProjects",
"score": 4
} |
#### File: MyPythonProjects/projects/tictactoe.py
```python
class TicTacToe:
'''Tic Tac Toe happens here'''
def __init__(self, size):
'''Generate a board of the given width and height'''
self.size = size
self.board = [str(i) for i in list(range(self.size * self.size))]
self.player = 'X'
self.stop = False
def display(self):
'''Print the board to the console'''
for line in range(0, self.size * self.size, self.size):
row = self.board[line : line + self.size]
print(row)
def turn(self):
'''Make the player choose a position, fill it in on the board, swap who's playing'''
position = input(f"Player {self.player}, choose a position: ")
if position == 'exit':
self.stop = True
return False
if position not in self.board:
print(f"Can't choose this position, try again")
return True
self.board[int(position)] = self.player
self.player = 'X' if self.player == 'O' else 'O'
def check(self):
'''Check whether the game has been won'''
winner = None
if all(i in {'X', 'O'} for i in self.board):
winner = 'TIE'
for row in range(0, self.size * self.size, self.size):
row = self.board[row : row + self.size]
if all(i == 'X' for i in row):
winner = 'X'
elif all(i == 'O' for i in row):
winner = 'O'
for col in range(0, self.size):
col = self.board[col :: self.size]
if all(i == 'X' for i in col):
winner = 'X'
elif all(i == 'O' for i in col):
winner = 'O'
dia = self.board[:: self.size + 1]
if all(i == 'X' for i in dia):
winner = 'X'
if all(i == 'O' for i in dia):
winner = 'O'
dia = self.board[self.size - 1 : self.size * self.size - 1 : self.size - 1]
if all(i == 'X' for i in dia):
winner = 'X'
if all(i == 'O' for i in dia):
winner = 'O'
if winner in {'X', 'O'}:
print(f"Player {winner} won!")
return True
elif winner == 'TIE':
print(f"The game was a tie!")
return True
def start(self):
'''Start the game of tic tac toe'''
while True:
self.display()
if self.check():
break
            while self.turn():
                pass
if self.stop:
self.stop = False
break
    def reset(self):
        '''Reset the board and the starting player for a new game'''
        self.board = [str(i) for i in list(range(self.size * self.size))]
        self.player = 'X'
tictactoe = TicTacToe(3)
tictactoe.start()
``` |
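A small optional extension (not in the original file) showing what `reset` is for: a rematch loop on the same board object.
```python
# Hypothetical rematch loop reusing the object created above.
while input("Play again? (y/n): ") == 'y':
    tictactoe.reset()
    tictactoe.start()
```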
{
"source": "jorijnsmit/pygsheets",
"score": 3
} |
#### File: pygsheets/pygsheets/drive.py
```python
from pygsheets.spreadsheet import Spreadsheet
from pygsheets.worksheet import Worksheet
from pygsheets.custom_types import ExportType
from pygsheets.exceptions import InvalidArgumentValue, CannotRemoveOwnerError, RequestError
from googleapiclient import discovery
from googleapiclient.http import MediaIoBaseDownload
from googleapiclient.errors import HttpError
import logging
import json
import os
import re
"""
pygsheets.drive
~~~~~~~~~~~~~~
This module provides wrappers for the Google Drive API v3.
"""
PERMISSION_ROLES = ['organizer', 'owner', 'writer', 'commenter', 'reader']
PERMISSION_TYPES = ['user', 'group', 'domain', 'anyone']
FIELDS_TO_INCLUDE = 'files(id, name, parents), nextPageToken, incompleteSearch'
_EMAIL_PATTERN = re.compile(r"\"?([-a-zA-Z0-9.`?{}]+@[-a-zA-Z0-9.]+\.\w+)\"?")
class DriveAPIWrapper(object):
"""A simple wrapper for the Google Drive API.
Various utility and convenience functions to support access to Google Drive files. By default the
requests will access the users personal drive. Use enable_team_drive(team_drive_id) to connect to a
TeamDrive instead.
Only functions used by pygsheet are wrapped. All other functionality can be accessed through the service
attribute.
See `reference <https://developers.google.com/drive/v3/reference/>`__ for details.
:param http: HTTP object to make requests with.
:param data_path: Path to the drive discovery file.
"""
def __init__(self, http, data_path, retries=3, logger=logging.getLogger(__name__)):
try:
with open(os.path.join(data_path, "drive_discovery.json")) as jd:
self.service = discovery.build_from_document(json.load(jd), http=http)
except:
self.service = discovery.build('drive', 'v3', http=http)
self.team_drive_id = None
self.include_team_drive_items = True
"""Include files from TeamDrive when executing requests."""
self.logger = logger
self._spreadsheet_mime_type_query = "mimeType='application/vnd.google-apps.spreadsheet'"
self.retries = retries
def enable_team_drive(self, team_drive_id):
"""Access TeamDrive instead of the users personal drive."""
self.team_drive_id = team_drive_id
def disable_team_drive(self):
"""Do not access TeamDrive (default behaviour)."""
self.team_drive_id = None
def get_update_time(self, file_id):
"""Returns the time this file was last modified in RFC 3339 format."""
return self._execute_request(self.service.files().get(fileId=file_id, fields='modifiedTime'))['modifiedTime']
def list(self, **kwargs):
"""
Fetch metadata of spreadsheets. Fetches a list of all files present in the users drive
or TeamDrive. See Google Drive API Reference for details.
Reference: `Files list request <https://developers.google.com/drive/v3/reference/files/list>`__
:param kwargs: Standard parameters (see documentation for details).
:return: List of metadata.
"""
result = list()
response = self._execute_request(self.service.files().list(**kwargs))
result.extend(response['files'])
while 'nextPageToken' in response:
kwargs['pageToken'] = response['nextPageToken']
response = self._execute_request(self.service.files().list(**kwargs))
result.extend(response['files'])
if 'incompleteSearch' in response and response['incompleteSearch']:
self.logger.warning('Not all files in the corpora %s were searched. As a result '
'the response might be incomplete.', kwargs['corpora'])
return result
def spreadsheet_metadata(self, query='', only_team_drive=False):
"""Fetch spreadsheet titles, ids & and parent folder ids.
The query string can be used to filter the returned metadata.
Reference: `search parameters docs. <https://developers.google.com/drive/v3/web/search-parameters>`__
:param query: Can be used to filter the returned metadata.
"""
if query:
query = query + ' and ' + str(self._spreadsheet_mime_type_query)
else:
query = self._spreadsheet_mime_type_query
if self.team_drive_id:
result = self.list(corpora='teamDrive',
teamDriveId=self.team_drive_id,
supportsTeamDrives=True,
includeTeamDriveItems=True,
fields=FIELDS_TO_INCLUDE,
q=query)
if not result and not only_team_drive:
result = self.list(fields=FIELDS_TO_INCLUDE,
supportsTeamDrives=True,
includeTeamDriveItems=self.include_team_drive_items,
q=query)
return result
else:
return self.list(fields=FIELDS_TO_INCLUDE,
supportsTeamDrives=True,
includeTeamDriveItems=self.include_team_drive_items,
q=query)
def delete(self, file_id, **kwargs):
"""Delete a file by ID.
Permanently deletes a file owned by the user without moving it to the trash. If the file belongs to a
Team Drive the user must be an organizer on the parent. If the input id is a folder, all descendants
owned by the user are also deleted.
Reference: `delete request <https://developers.google.com/drive/v3/reference/files/delete>`__
:param file_id: The Id of the file to be deleted.
:param kwargs: Standard parameters (see documentation for details).
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
self._execute_request(self.service.files().delete(fileId=file_id, **kwargs))
def move_file(self, file_id, old_folder, new_folder, **kwargs):
"""Move a file from one folder to another.
Requires the current folder to delete it.
Reference: `update request <https://developers.google.com/drive/v3/reference/files/update>`_
:param file_id: ID of the file which should be moved.
:param old_folder: Current location.
:param new_folder: Destination.
:param kwargs: Optional arguments. See reference for details.
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
self._execute_request(self.service.files().update(fileId=file_id, removeParents=old_folder,
addParents=new_folder, **kwargs))
def copy_file(self, file_id, title, folder, **kwargs):
"""
Copy a file from one location to another
        Reference: `copy request <https://developers.google.com/drive/v3/reference/files/copy>`__
:param file_id: Id of file to copy.
:param title: New title of the file.
:param folder: New folder where file should be copied.
:param kwargs: Optional arguments. See reference for details.
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
body = {'name': title, 'parents': [folder]}
return self._execute_request(self.service.files().copy(fileId=file_id, body=body, **kwargs))
def _export_request(self, file_id, mime_type, **kwargs):
"""The export request."""
return self.service.files().export(fileId=file_id, mimeType=mime_type, **kwargs)
def export(self, sheet, file_format, path='', filename=''):
"""Download a spreadsheet and store it.
Exports a Google Doc to the requested MIME type and returns the exported content.
.. warning::
            This can export files of at most 10 MB in size!
Uses one or several export request to download the files. When exporting to CSV or TSV each worksheet is
exported into a separate file. The API cannot put them into the same file. In this case the worksheet index
is appended to the file-name.
Reference: `request <https://developers.google.com/drive/v3/reference/files/export>`__
:param sheet: The spreadsheet or worksheet to be exported.
:param file_format: File format (:class:`ExportType`)
:param path: Path to where the file should be stored. (default: current working directory)
:param filename: Name of the file. (default: Spreadsheet Id)
"""
request = None
tmp = None
mime_type, file_extension = getattr(file_format, 'value', file_format).split(':')
if isinstance(sheet, Spreadsheet):
if (file_format == ExportType.CSV or file_format == ExportType.TSV) and len(sheet.worksheets()) > 1:
for worksheet in sheet:
self.export(worksheet, file_format, path=path, filename=filename + str(worksheet.index))
return
else:
request = self._export_request(sheet.id, mime_type)
elif isinstance(sheet, Worksheet):
if sheet.index != 0:
tmp = sheet.index
try:
sheet.index = 0
except HttpError:
raise Exception("Can only export first sheet in readonly mode")
request = self._export_request(sheet.spreadsheet.id, mime_type)
import io
        file_name = str(sheet.id or tmp) + file_extension if not filename else filename + file_extension
fh = io.FileIO(path + file_name, 'wb')
downloader = MediaIoBaseDownload(fh, request)
done = False
while done is False:
status, done = downloader.next_chunk()
# logging.info('Download progress: %d%%.', int(status.progress() * 100)) TODO fix this
logging.info('Download finished. File saved in %s.', path + file_name)
if tmp is not None:
sheet.index = tmp + 1
if isinstance(sheet, Worksheet):
sheet.refresh(False)
def create_permission(self, file_id, role, type, **kwargs):
"""Creates a permission for a file or a TeamDrive.
See `reference <https://developers.google.com/drive/v3/reference/permissions/create>`__ for more details.
:param file_id: The ID of the file or Team Drive.
:param role: The role granted by this permission.
:param type: The type of the grantee.
:keyword emailAddress: The email address of the user or group to which this permission refers.
:keyword domain: The domain to which this permission refers.
        :keyword allowFileDiscovery: Whether the permission allows the file to be discovered through search. This is
            only applicable for permissions of type domain or anyone.
:keyword expirationTime: The time at which this permission will expire (RFC 3339 date-time). Expiration
times have the following restrictions:
* They can only be set on user and group permissions
* The time must be in the future
* The time cannot be more than a year in the future
:keyword emailMessage: A plain text custom message to include in the notification email.
:keyword sendNotificationEmail: Whether to send a notification email when sharing to users or groups.
This defaults to true for users and groups, and is not allowed for other
requests. It must not be disabled for ownership transfers.
:keyword supportsTeamDrives: Whether the requesting application supports Team Drives. (Default: False)
:keyword transferOwnership: Whether to transfer ownership to the specified user and downgrade
the current owner to a writer. This parameter is required as an acknowledgement
of the side effect. (Default: False)
:keyword useDomainAdminAccess: Whether the request should be treated as if it was issued by a
domain administrator; if set to true, then the requester will be granted
access if they are an administrator of the domain to which the item belongs.
(Default: False)
:return: `Permission Resource <https://developers.google.com/drive/v3/reference/permissions#resource>`_
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
if 'emailAddress' in kwargs and 'domain' in kwargs:
raise InvalidArgumentValue('A permission can only use emailAddress or domain. Do not specify both.')
if role not in PERMISSION_ROLES:
raise InvalidArgumentValue('A permission role can only be one of ' + str(PERMISSION_ROLES) + '.')
        if type not in PERMISSION_TYPES:
            raise InvalidArgumentValue('A permission type can only be one of ' + str(PERMISSION_TYPES) + '.')
body = {
'kind': 'drive#permission',
'type': type,
'role': role
}
if 'emailAddress' in kwargs:
body['emailAddress'] = kwargs['emailAddress']
del kwargs['emailAddress']
elif 'domain' in kwargs:
body['domain'] = kwargs['domain']
del kwargs['domain']
if 'allowFileDiscovery' in kwargs:
body['allowFileDiscovery'] = kwargs['allowFileDiscovery']
del kwargs['allowFileDiscovery']
if 'expirationTime' in kwargs:
body['expirationTime'] = kwargs['expirationTime']
del kwargs['expirationTime']
return self._execute_request(self.service.permissions().create(fileId=file_id, body=body, **kwargs))
def list_permissions(self, file_id, **kwargs):
"""List all permissions for the specified file.
See `reference <https://developers.google.com/drive/v3/reference/permissions/list>`__ for more details.
:param file_id: The file to get the permissions for.
:keyword pageSize: Number of permissions returned per request. (Default: all)
:keyword supportsTeamDrives: Whether the application supports TeamDrives. (Default: False)
:keyword useDomainAdminAccess: Request permissions as domain admin. (Default: False)
:return: List of `Permission Resources <https://developers.google.com/drive/v3/reference/permissions#resource>`_
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
# Ensure that all fields are returned. Default is only id, type & role.
if 'fields' not in kwargs:
kwargs['fields'] = '*'
permissions = list()
response = self._execute_request(self.service.permissions().list(fileId=file_id, **kwargs))
permissions.extend(response['permissions'])
while 'nextPageToken' in response:
response = self._execute_request(self.service.permissions().list(fileId=file_id,
pageToken=response['nextPageToken'], **kwargs))
permissions.extend(response['permissions'])
return permissions
def delete_permission(self, file_id, permission_id, **kwargs):
"""Deletes a permission.
See `reference <https://developers.google.com/drive/v3/reference/permissions/delete>`__ for more details.
:param file_id: The ID of the file or Team Drive.
:param permission_id: The ID of the permission.
:keyword supportsTeamDrives: Whether the requesting application supports Team Drives. (Default: false)
:keyword useDomainAdminAccess: Whether the request should be treated as if it was issued by a
domain administrator; if set to true, then the requester will be
granted access if they are an administrator of the domain to which
the item belongs. (Default: false)
"""
if 'supportsTeamDrives' not in kwargs and self.team_drive_id:
kwargs['supportsTeamDrives'] = True
try:
self._execute_request(self.service.permissions().delete(fileId=file_id, permissionId=permission_id, **kwargs))
except HttpError as error:
self.logger.exception(str(error))
if re.search(r'The owner of a file cannot be removed\.', str(error)):
raise CannotRemoveOwnerError('The owner of a file cannot be removed!')
else:
raise
def _execute_request(self, request):
"""Executes a request.
:param request: The request to be executed.
:return: Returns the response of the request.
"""
return request.execute(num_retries=self.retries)
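    # Usage sketch (hypothetical names, assuming this class is the pygsheets Drive wrapper
    # reached through an authorized client rather than constructed directly):
    #   gc = pygsheets.authorize()
    #   gc.drive.export(spreadsheet, ExportType.CSV, path='/tmp/')  # one file per worksheet
    #   gc.drive.create_permission(spreadsheet.id, role='reader', type='user',
    #                              emailAddress='user@example.com')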
``` |
{
"source": "jorikemppi/larkstongue",
"score": 3
} |
#### File: larkstongue/larkstongue/classes.py
```python
class assetRaw:
def __init__(self, name, bitmap, data, size):
self.name = name
self.bitmap = bitmap
self.data = data
self.size = size
class assetCompressed:
def __init__(self, name, type, data, scanlineWidth, size, offset, method):
self.name = name
self.type = type
self.data = data
self.scanlineWidth = scanlineWidth
self.size = size
self.offset = offset
self.method = method
class node:
    # A Huffman tree node: `word` holds a leaf's symbol and is None for internal nodes.
def __init__(self, freq, word, left, right):
self.freq = freq
self.word = word
self.left = left
self.right = right
```
#### File: larkstongue/compress/huffman.py
```python
from classes import node
def huffmanCompress(input):
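    # Huffman-codes a hex string (two hex digits per byte) and returns a hex string.
    # Bit-stream layout: a 2-bit pad length, the tree in pre-order ("0" = internal node,
    # "1" + the 8-bit byte value = leaf), the coded message, then zero padding so the
    # total length is a whole number of 4-bit nibbles.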
output = ""
freqTable = []
for i in range(0, 256):
freqTable.append([i, 0])
for i in range(0, len(input), 2):
byteInHex = input[i:i+2]
byteInDec = int(byteInHex, 16)
freqTable[byteInDec][1] += 1
freqTable = sorted(freqTable, key=lambda l: l[1], reverse=True)
nodeTable = []
for charFrequency in freqTable:
if charFrequency[1] > 0:
nodeTable.append(node(charFrequency[1], charFrequency[0], None, None))
while len(nodeTable) > 1:
left = nodeTable.pop()
right = nodeTable.pop()
nodeTable.append(node(left.freq + right.freq, None, left, right))
nodeTable = sorted(nodeTable, key=lambda node: node.freq, reverse=True)
codeTable = []
for i in range(0,256):
codeTable.append(None)
global outputBinary
outputBinary = ""
def huffmanCompressRecursion(node, bits):
global outputBinary
        if node.word is None:
outputBinary = outputBinary + "0"
huffmanCompressRecursion(node.left, bits+"0")
huffmanCompressRecursion(node.right, bits+"1")
else:
outputBinary = outputBinary + "1" + format(node.word, "08b")
codeTable[node.word] = bits
huffmanCompressRecursion(nodeTable[0], "")
messageBinary = ""
for i in range(0, len(input), 2):
byteInHex = input[i:i+2]
byteInDec = int(byteInHex, 16)
messageBinary = messageBinary + codeTable[byteInDec]
outputBinary = outputBinary + messageBinary
padLength = 4 - (len(outputBinary) + 2) % 4
if padLength == 4:
padLength = 0
padString = format(padLength, "02b")
outputBinary = padString + outputBinary
while len(outputBinary) % 4 != 0:
outputBinary = outputBinary + "0"
for i in range(0, len(outputBinary), 4):
binarySlice = outputBinary[i:i+4]
nibbleValue = int(binarySlice, 2)
nibbleHex = format(nibbleValue, "x")
output = output + nibbleHex
return output
def huffmanDecompress(input):
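    # Inverse of huffmanCompress: reads the 2-bit pad length, rebuilds the tree from the
    # pre-order stream, then walks the tree bit by bit to emit the original hex string.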
global output
output = ""
inputBinary = ""
for char in input:
inputBinary = inputBinary + format(int(char, 16), "04b")
global nodeTree
nodeTree = node(None, None, None, None)
global bitIndex
bitIndex = 1
def huffmanRebuildTree(nod):
global nodeTree
global bitIndex
bitIndex += 1
if inputBinary[bitIndex] == "0":
nod.left = node(None, None, None, None)
huffmanRebuildTree(nod.left)
nod.right = node(None, None, None, None)
huffmanRebuildTree(nod.right)
else:
wordBinary = inputBinary[bitIndex+1:bitIndex+9]
wordDec = int(wordBinary, 2)
wordHex = format(wordDec, "02x")
nod.word = wordHex
bitIndex += 8
huffmanRebuildTree(nodeTree)
def huffmanDecompressRecursion(nod):
global nodeTree
global bitIndex
global output
        if nod.word is not None:
output = output + nod.word
else:
bitIndex += 1
if inputBinary[bitIndex] == "0":
huffmanDecompressRecursion(nod.left)
else:
huffmanDecompressRecursion(nod.right)
padLengthBinary = inputBinary[:2]
padLength = int(padLengthBinary, 2)
while bitIndex < len(inputBinary) - padLength - 1:
huffmanDecompressRecursion(nodeTree)
return output
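# Round-trip sketch (hypothetical input; both functions operate on hex strings):
#   packed = huffmanCompress("deadbeefdeadbeef")
#   assert huffmanDecompress(packed) == "deadbeefdeadbeef"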
``` |
{
"source": "jorik/rusty-hog",
"score": 3
} |
#### File: rusty-hog/scripts/s3weblisting_secret_monitor.py
```python
import os
import gzip
import pprint
import re
import requests
import tempfile
import sys
import subprocess
import json
import logging
import xml.etree.ElementTree as ET
import htmllistparse
import time
import urllib.parse
import copy
from datetime import datetime
loglevel = "WARNING"
for arg in sys.argv:
if arg.startswith("--log="):
loglevel = arg[6:]
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError("Invalid log level: %s" % loglevel)
logging.basicConfig(level=numeric_level)
# initialize auth tokens, fail if not present
DOWNLOAD_CONFIG_PATH = os.environ["DOWNLOAD_CONFIG_PATH"]
INSIGHTS_INSERT_KEY = os.environ["INSIGHTS_INSERT_KEY"]
INSIGHTS_ACCT_ID = os.environ["INSIGHTS_ACCT_ID"]
DUROC_HOG_PATH = os.environ["DUROC_HOG_PATH"]
# config file format: [ { "endpoint": string (bucket name), "prefixes": [string], "regex": string, "name": string, "recursive": bool, "after_date": ISO-8601 string } ... ]
# example (hypothetical bucket): [ { "endpoint":"example-bucket", "prefixes":["php_agent/release/"], "regex":".*\\.tar\\.gz", "name":"PHP Agent", "recursive": false, "after_date":"2020-01-01" } ]
with open(DOWNLOAD_CONFIG_PATH, "r") as f_j:
    config = json.load(f_j)
output_array = []
def scan_binary(file_url, content_item, config_item):
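    # Downloads one listed file, scans it with the external duroc-hog secret scanner,
    # and converts each finding into a New Relic Insights event dict.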
logging.debug(f"scan_binary({file_url}, {content_item}, {config_item}")
output_array = []
r = requests.get(file_url)
tempdir = tempfile.gettempdir()
filename = os.path.basename(urllib.parse.urlparse(file_url).path)
tempfile_path = os.path.join(tempdir, filename)
    with open(tempfile_path, "wb") as f:
        f.write(r.content)
duroc_hog_output = subprocess.run(
[DUROC_HOG_PATH, "-z", tempfile_path], capture_output=True, check=True
)
json_output = json.loads(duroc_hog_output.stdout)
os.remove(tempfile_path)
for finding in json_output:
output_array.append(
{
"eventType": "s3weblisting_secret_monitor",
"reason": finding["reason"],
"path": finding["path"],
"url": file_url,
"filename": filename,
"name": config_item['name'],
}
)
return output_array
def scan_endpoint(config_item):
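    # Walks an S3 bucket via its public ListObjects XML endpoint, scans every key that
    # matches the regex, is non-empty, and is newer than after_date, and recurses into
    # CommonPrefixes ("subfolders") when recursive is set.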
endpoint = config_item['endpoint']
regex = re.compile(config_item['regex'])
name = config_item['name']
recursive = config_item['recursive']
prefixes = config_item['prefixes']
after_date = datetime.fromisoformat(config_item['after_date'])
logging.debug(f"scan_endpoint({config_item}")
output_array = []
ns = {'aws': 'http://s3.amazonaws.com/doc/2006-03-01/'}
for prefix in prefixes:
url = f"https://{endpoint}.s3.amazonaws.com/?delimiter=/&prefix={prefix}"
        try:
            et_root = ET.fromstring(requests.get(url).text)
        except (requests.RequestException, ET.ParseError):
            logging.error(f"ET.fromstring(requests.get({url}).text) raised an exception")
            continue
for content_item in et_root.findall('aws:Contents', ns):
# logging.debug(f"content_item: {content_item}")
# logging.debug(f"content_item.find('aws:Key', ns): {content_item.find('aws:Key', ns)}")
key = content_item.find('aws:Key', ns).text
size = int(content_item.find('aws:Size', ns).text)
modified = datetime.fromisoformat(content_item.find('aws:LastModified', ns).text.replace('Z', '+00:00'))
if regex.search(key) and size > 0 and modified > after_date:
file_url = f"https://{endpoint}.s3.amazonaws.com/{key}"
output_array.extend(scan_binary(file_url, content_item, config_item))
if recursive:
new_config_item = copy.deepcopy(config_item)
new_prefixes = [content_item[0].text for content_item in et_root.findall('aws:CommonPrefixes', ns)]
if len(new_prefixes) > 0:
new_config_item['prefixes'] = new_prefixes
output_array.extend(scan_endpoint(new_config_item))
return output_array
output_array = [result for config_item in config for result in scan_endpoint(config_item)]
# for config_item in config:
# output_array.extend(scan_url(config_item))
url = "https://insights-collector.newrelic.com/v1/accounts/{INSIGHTS_ACCT_ID}/events"
headers = {
"Content-Type": "application/json",
"X-Insert-Key": INSIGHTS_INSERT_KEY,
"Content-Encoding": "gzip",
}
post = gzip.compress(json.dumps(output_array).encode("utf-8"))
logging.info(f"len(output_array) = {len(output_array)}")
logging.debug(output_array)
logging.info("Submitting data to New Relic Insights...")
r = requests.post(url, data=post, headers=headers)
logging.info(f"insights status code: {r.status_code}")
#
``` |
{
"source": "JORIM1981/Flash-Cards",
"score": 3
} |
#### File: Flash-Cards/flashcards/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from django.urls import reverse
# Create your models here.
class Deck(models.Model):
title = models.CharField(max_length=200)
subject = models.CharField(max_length=200)
description = models.TextField(max_length=500, blank=True)
creator = models.ForeignKey(User, on_delete=models.CASCADE, related_name="creator")
def __str__(self):
return self.title
def get_absolute_url(self):
"""Returns the canonical URL for a deck."""
return reverse('flashcards:create_cards', args=[str(self.id)])
class Card(models.Model):
question = models.CharField(max_length=200)
answer = models.CharField(max_length=200)
deck = models.ForeignKey(Deck, on_delete=models.CASCADE)
def __str__(self):
return self.question
class StudySet(models.Model):
    student = models.ForeignKey(User, on_delete=models.CASCADE, related_name="student")
class StudentDeck(models.Model):
studyset = models.ForeignKey(StudySet, on_delete=models.CASCADE, related_name="studyset")
student = models.ForeignKey(User, on_delete=models.CASCADE)
deck = models.ForeignKey(Deck, on_delete=models.CASCADE)
class FlipBox(models.Model):
frequency = models.SmallIntegerField()
def __str__(self):
return str(self.frequency)
class ScoreBoard(models.Model):
student = models.ForeignKey(StudySet, on_delete=models.CASCADE)
card = models.ForeignKey(Card, on_delete=models.CASCADE)
box = models.ForeignKey(FlipBox, on_delete=models.CASCADE)
updated = models.DateField()
Flip_date = models.DateField()
def __str__(self):
return str(self.card)
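# Usage sketch (hypothetical data, e.g. from a Django shell):
#   user = User.objects.create_user("alice")
#   deck = Deck.objects.create(title="Capitals", subject="Geography", creator=user)
#   Card.objects.create(question="Capital of Kenya?", answer="Nairobi", deck=deck)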
``` |