{
"source": "123Powerful/csmetrics.org",
"score": 3
}
#### File: csmetrics.org/app/mag_search.py
```python
import os, json, requests, time, csv
import http.client, urllib.request, urllib.parse, urllib.error, base64
cur_path = os.path.dirname(os.path.abspath(__file__))
MAS_URL_PREFIX = "https://api.labs.cognitive.microsoft.com"
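# Project Academic Knowledge (Microsoft Academic) endpoint; the service was
# retired at the end of 2021, so these calls may no longer succeed.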
headers = {
# Request headers
'Ocp-Apim-Subscription-Key': '' # api key required
}
def query_academic_search(type, url, query):
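# type: "get" or "post"; url: full endpoint URL; query: dict of query/body parameters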
if type == "get":
response = requests.get(url, params=urllib.parse.urlencode(query), headers=headers)
elif type == "post":
response = requests.post(url, json=query, headers=headers)
if response.status_code != 200:
print("return statue: " + str(response.status_code))
print("ERROR: problem with the request.")
print(response.content)
#exit()
return json.loads((response.content).decode("utf-8"))
def get_institution(instname):
url = os.path.join(MAS_URL_PREFIX, "academic/v1.0/interpret")
data = query_academic_search("get", url, {"query": instname})
# print(data)
interpret_expr = data["interpretations"][0]["rules"][0]["output"]["value"]
# print(interpret_expr)
normalized_name = interpret_expr.split("\'")[-2]
# print(instname, normalized_name)
return normalized_name
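# Hypothetical example: get_institution("MIT") might return a normalized name
# such as "massachusetts institute of technology" (actual output depends on the API).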
def replaceParentGrid():
old_inst_file = os.path.join(cur_path, "data/inst_fullname.csv")
new_inst_file = os.path.join(cur_path, "data/inst_fullname_grid.csv")
grid_rel_file = os.path.join(cur_path, "data/parent_relations.csv")
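# Column layout inferred from usage below:
#   inst_fullname.csv    -> [id, fullname, grid, url, wiki]
#   parent_relations.csv -> child grid id in column 0, parent grid id in column 2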
reader = csv.reader(open(grid_rel_file))
next(reader) # skip the first line
grid_relations = {r[0]:r[2] for r in reader}
reader = csv.reader(open(old_inst_file))
writer = csv.writer(open(new_inst_file, "w"))
for r in reader:
print(r, len(r))
if len(r) < 3:
writer.writerow([r[0], r[1], "", "", ""])
else:
writer.writerow([r[0], r[1], grid_relations[r[2]] if r[2] in grid_relations else r[2], r[3] if len(r) > 3 else "", r[4] if len(r) > 4 else ""])
def merge_grid_institutions():
gridMap = dict()
reader = csv.reader(open("data/grid.csv"))
next(reader) # skip the first line
gridMap = {r[0].strip():(r[1].strip(),r[2].strip()) for r in reader}
reader = csv.reader(open("data/grid_types.csv"))
next(reader)
gridType = {r[0]:r[1] for r in reader}
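# gridMap: grid id -> (country, continent); gridType: grid id -> institution type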
instInfo = {}
csvfile = open('data/inst_full_clean.csv', 'w', newline='')
spamwriter = csv.writer(csvfile, delimiter=',')
reader = csv.reader(open("data/inst_fullname_grid.csv"))
next(reader)
for r in reader:
instInfo[r[0]] = {
"fullname": r[1].strip(),
"grid": r[2].strip(),
"url": r[3].strip(),
"wiki": r[4].strip()
}
grid = instInfo[r[0]]["grid"]
# print(r[0], instInfo[r[0]]["grid"], gridType[instInfo[r[0]]["grid"]])
instInfo[r[0]]["country"] = gridMap[grid][0] if grid in gridMap else ""
instInfo[r[0]]["continent"] = gridMap[grid][1] if grid in gridMap else "Other"
instInfo[r[0]]["type"] = gridType[grid] if grid in gridType else "Other"
for k,v in instInfo.items():
spamwriter.writerow([k] + [v["fullname"],v["type"],v["continent"],v["country"],v["url"] if v["url"] != "" else v["wiki"]])
def clean_inst():
# inst_alias clean
# instfile = open("inst_alias.csv")
# reader = csv.reader(instfile, delimiter=',')
# with open('inst_alias_clean.csv', 'w', newline='') as csvfile:
# spamwriter = csv.writer(csvfile, delimiter=',')
# for r in reader:
# if len(r) == 2:
# spamwriter.writerow(r)
# else:
# spamwriter.writerow([r[0], "{}".format(','.join(r[1:]))])
# change csv format for inst_fullname
instfile = open("data/inst_fullname")
reader = csv.reader(instfile, delimiter='\t')
with open('data/inst_fullname.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',')
for r in reader:
spamwriter.writerow(r)
def gen_inst_alias(instName):
instNameAlias = {}
# print("instName", sorted(list(instName))[:300])
ind = 1
for n in sorted(list(instName)):
try:
key = get_institution(n)
print(n, "-->", key, "{}/{}".format(ind, len(instName)))
if key in instNameAlias:
instNameAlias[key].append(n)
else:
instNameAlias[key] = [n]
time.sleep(0.5)
except Exception as exct:
print(" ERROR:", n)
ind += 1
with open('inst_alias.csv', 'w', newline='') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
for k, v in instNameAlias.items():
spamwriter.writerow([k] + [alias for alias in v])
if __name__ == '__main__':
# clean_inst()
replaceParentGrid()
merge_grid_institutions()
```
{
"source": "123prashanth123/Fault-Detection-System",
"score": 3
}
#### File: Fault-Detection-System/Application Building Blocks/RTApp.py
```python
import cv2
import torch
import utils as u
# ******************************************************************************************************************** #
# Inference Helper
def __help__(frame=None, model=None, fea_extractor=None, show_prob=True, pt1=None, pt2=None):
"""
frame : Current frame being processed
model : Siamese Network Model
fea_extractor : Feature Extraction Model
show_prob : Flag to control whether to display the similarity score
pt1 : Start Point of the Reference Bounding Box
pt2 : End Point of the Reference Bounding Box
"""
disp_frame = frame.copy()
# Center Crop + Resize
frame = u.preprocess(frame, change_color_space=False)
# Perform Inference on current frame
with torch.no_grad():
features = u.normalize(fea_extractor(u.FEA_TRANSFORM(frame).to(u.DEVICE).unsqueeze(dim=0)))
y_pred = torch.sigmoid(model(features))[0][0].item()
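# Pipeline: preprocessed frame -> feature embedding -> siamese head -> sigmoid score in [0, 1]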
# Prediction > Upper Bound -----> Match
# Lower Bound <= Prediction <= Upper Bound -----> Possible Match
# Prediction < Lower Bound -----> No Match
if show_prob:
if y_pred >= u.upper_bound_confidence:
cv2.putText(img=disp_frame, text="Match, {:.5f}".format(y_pred), org=(25, 75),
fontScale=1, fontFace=cv2.FONT_HERSHEY_SIMPLEX,
color=u.CLI_GREEN, thickness=2)
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_GREEN, thickness=2)
elif u.lower_bound_confidence <= y_pred <= u.upper_bound_confidence:
cv2.putText(img=disp_frame, text="Possible Error, {:.5f}".format(y_pred), org=(25, 75),
fontScale=1, fontFace=cv2.FONT_HERSHEY_SIMPLEX,
color=u.CLI_ORANGE, thickness=2)
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_ORANGE, thickness=2)
else:
cv2.putText(img=disp_frame, text="No Match, {:.5f}".format(y_pred), org=(25, 75),
fontScale=1, fontFace=cv2.FONT_HERSHEY_SIMPLEX,
color=u.CLI_RED, thickness=2)
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_RED, thickness=2)
else:
if y_pred >= u.upper_bound_confidence:
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_GREEN, thickness=2)
elif u.lower_bound_confidence <= y_pred <= u.upper_bound_confidence:
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_ORANGE, thickness=2)
else:
cv2.rectangle(img=disp_frame,
pt1=(int(pt1[0]) - u.RELIEF, int(pt1[1]) - u.RELIEF), pt2=(int(pt2[0]) + u.RELIEF, int(pt2[1]) + u.RELIEF),
color=u.CLI_RED, thickness=2)
return disp_frame
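# Note: the branches above differ only in label text and colour; a small lookup
# table keyed on the confidence band would remove the duplication.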
# ******************************************************************************************************************** #
"""
Video Handling
"""
```
#### File: Feature Extractions/Feature Extraction and Comparison/Snapshot.py
```python
import os
import cv2
import platform
import utils as u
# ******************************************************************************************************************** #
def capture_snapshot():
# Setting up capture Object
if platform.system() != "Windows":
cap = cv2.VideoCapture(u.ID)
else:
cap = cv2.VideoCapture(u.ID, cv2.CAP_DSHOW)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, u.CAM_WIDTH)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, u.CAM_HEIGHT)
cap.set(cv2.CAP_PROP_FPS, u.FPS)
u.breaker()
count = len(os.listdir(u.IMAGE_PATH)) + 1
# Read Data from capture object
while cap.isOpened():
_, frame = cap.read()
# Preprocess frame with CLAHE (clipLimit:2, tileGridSize: (2, 2))
frame = u.clahe_equ(frame)
# Display the frame
cv2.imshow("Feed", frame)
# Read the key once per loop; calling cv2.waitKey() twice can drop keypresses
key = cv2.waitKey(1)
# Press 'c' to Capture the frame
if key == ord("c"):
cv2.imwrite(os.path.join(u.IMAGE_PATH, "Snapshot_{}.png".format(count)), frame)
print("Captured Snapshot - {}".format(count))
count += 1
# Press 'q' to Quit
if key == ord("q"):
break
u.breaker()
# Release capture object and destroy all windows
cap.release()
cv2.destroyAllWindows()
# ******************************************************************************************************************** #
```
#### File: Feature Extractions/Feature Extraction and Comparison/utils.py
```python
import os
import cv2
from termcolor import colored
os.system("color")
# LineBreaker
def breaker(num=50, char="*"):
print(colored("\n" + num*char + "\n", color="magenta"))
# Custom Print Function
def myprint(text, color, on_color=None):
print(colored(text, color=color, on_color=on_color))
# CLAHE Preprocessing (Cliplimit: 2.0, TileGridSize: (2, 2))
def clahe_equ(image):
clahe = cv2.createCLAHE(clipLimit=2, tileGridSize=(2, 2))
for i in range(3):
image[:, :, i] = clahe.apply(image[:, :, i])
return image
# Center Crop (Resize to 256x256, then center crop the 224x224 region)
def preprocess(image, change_color_space=True):
if change_color_space:
image = cv2.cvtColor(src=image, code=cv2.COLOR_BGR2RGB)
image = cv2.resize(src=image, dsize=(256, 256), interpolation=cv2.INTER_AREA)
h, w, _ = image.shape
cx, cy = w // 2, h // 2
return image[cy - 112:cy + 112, cx - 112:cx + 112, :]
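# 224x224 matches the canonical input size of ImageNet-pretrained backbones,
# which is presumably why it is used here.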
# Setting up self-aware Image Directory
IMAGE_PATH = os.path.join(os.path.dirname(__file__), "Images")
if not os.path.exists(IMAGE_PATH):
os.makedirs(IMAGE_PATH)
# Webcam Feed Attributes
CAM_WIDTH, CAM_HEIGHT, FPS, ID = 640, 360, 30, 0
```
#### File: Feature Extractions/ORB In-Depth Analysis/Analysis.py
```python
import os
import cv2
import platform
import numpy as np
import random as r
import matplotlib.pyplot as plt
import utils as u
#####################################################################################################
# Function to get ORB Features from an image.
def get_orb_features(orb, image):
"""
orb: ORB Object
image: (np.ndarray) image data
"""
image = cv2.cvtColor(src=image, code=cv2.COLOR_RGB2GRAY)
kp, des = orb.detectAndCompute(image, None)
return kp, des
# Function to return the topK keypoints, selected by their response values
def get_topK(kp, K=5):
"""
kp: List of ORB Keypoints
K: Top K keypoints to use (Default: 5)
"""
responses = []
for k in kp:
responses.append(k.response)
if len(responses) > K:
responses_idx = np.argpartition(responses, -K)[-K:].astype("int64")
topK_kp = []
for idx in responses_idx:
topK_kp.append(kp[idx])
return topK_kp
else:
return None
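# np.argpartition places the K largest responses in the last K positions without
# a full sort (average O(n)), so slicing [-K:] yields the top-K indices.
# Callers must handle the None returned when there are K or fewer keypoints.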
# Image Analysis tool showing various keypoint attributes for a random keypoint
def show_kp_info(kp):
kp_sample = r.randint(0, len(kp)-1)
u.breaker()
print("Total Number of Keypoints : {}".format(len(kp)))
u.breaker()
print("Keypoint {} Information".format(kp_sample))
u.breaker()
print("Angle : {:.5f}".format(kp[kp_sample].angle))
print("Octave : {:.5f}".format(kp[kp_sample].octave))
print("Point : {}".format(kp[kp_sample].pt))
print("Response : {:.5f}".format(kp[kp_sample].response))
print("Size : {:.5f}".format(kp[kp_sample].size))
# Image Analysis tool showing various keypoint attributes for the entire list of keypoints
def show_info(kp):
angles = []
octaves = []
points = []
responses = []
sizes = []
for k in kp:
angles.append(k.angle)
octaves.append(k.octave)
points.append(k.pt)
responses.append(k.response)
sizes.append(k.size)
x_Axis = np.arange(1, len(kp)+1)
plt.figure()
plt.subplot(1, 4, 1)
plt.plot(x_Axis, angles, "r")
plt.grid()
plt.title("Angles")
plt.subplot(1, 4, 2)
plt.plot(x_Axis, octaves, "r")
plt.grid()
plt.title("Octaves")
plt.subplot(1, 4, 3)
plt.plot(x_Axis, responses, "r")
plt.grid()
plt.title("Responses")
plt.subplot(1, 4, 4)
plt.plot(x_Axis, sizes, "r")
plt.grid()
plt.title("Sizes")
plt.show()
# Display the Keypoint image
def show_kp_image(image, kp):
"""
image: (np.ndarray) Image data
kp: (list) List of ORB Keypoints
"""
kp_image = cv2.drawKeypoints(image, kp, None, (0, 255, 0), cv2.DRAW_MATCHES_FLAGS_DEFAULT)
plt.figure()
plt.imshow(kp_image)
plt.axis("off")
plt.show()
#####################################################################################################
# Function to handle image analysis
def image_analysis(name, nfeatures, K):
# Read Image file
image = cv2.cvtColor(src=cv2.imread(os.path.join(u.IMAGE_PATH, name)), code=cv2.COLOR_BGR2RGB)
# Create ORB object
orb = cv2.ORB_create(nfeatures=nfeatures)
# Obtain image keypoints
kp, _ = get_orb_features(orb, image)
# Show Random Keypoint Information
show_kp_info(kp)
# Show all keypoint information
show_info(kp)
# Get topK Keypoints
topK_kp = get_topK(kp, K)
# Show image with all the keypoints
show_kp_image(image, kp)
# Show image with topK keypoints
show_kp_image(image, topK_kp)
#####################################################################################################
def realtime_analysis(nfeatures, K=None):
# Create ORB object
orb = cv2.ORB_create(nfeatures=nfeatures)
# Setting up capture object
if platform.system() != "Windows":
cap = cv2.VideoCapture(u.ID)
else:
cap = cv2.VideoCapture(u.ID, cv2.CAP_DSHOW)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, u.CAM_WIDTH)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, u.CAM_HEIGHT)
cap.set(cv2.CAP_PROP_FPS, u.FPS)
# Create CLAHE object
clahe = cv2.createCLAHE(clipLimit=5, tileGridSize=(2, 2))
while cap.isOpened():
_, frame = cap.read()
# Preprocess frame with CLAHE (clipLimit: 5, tileGridSize: (2, 2))
for i in range(frame.shape[-1]):
frame[:, :, i] = clahe.apply(frame[:, :, i])
# frame = cv2.GaussianBlur(src=frame, ksize=(15, 15), sigmaX=0)
# Obtain frame keypoints
kp, _ = get_orb_features(orb, frame)
# Get topK Keypoints if K is specified
if K is None:
frame = cv2.drawKeypoints(frame, kp, None, (0, 255, 0), cv2.DRAW_MATCHES_FLAGS_DEFAULT)
else:
topK_kp = get_topK(kp, K=K)
if topK_kp is not None:
frame = cv2.drawKeypoints(frame, topK_kp, None, (0, 255, 0), cv2.DRAW_MATCHES_FLAGS_DEFAULT)
# Press 'q' to Quit
cv2.imshow("Feed", frame)
if cv2.waitKey(1) == ord("q"):
break
# Release capture object and destroy all windows
cap.release()
cv2.destroyAllWindows()
#####################################################################################################
```
#### File: OSD/One Shot Detectors with Edges/Snapshot.py
```python
import os
import cv2
import platform
import numpy as np
import utils as u
def capture_snapshot():
# Setting up capture object
if platform.system() != "Windows":
cap = cv2.VideoCapture(u.ID)
else:
cap = cv2.VideoCapture(u.ID, cv2.CAP_DSHOW)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, u.CAM_WIDTH)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, u.CAM_HEIGHT)
cap.set(cv2.CAP_PROP_FPS, u.FPS)
count = len(os.listdir(u.IMAGE_PATH)) + 1
u.breaker()
# Read data from capture object
while cap.isOpened():
_, frame = cap.read()
# Obtain the edges
frame = u.AUGMENT(images=np.expand_dims(frame, axis=0))[0]
cv2.imshow("Feed", frame)
# Read the key once per loop; calling cv2.waitKey() twice can drop keypresses
key = cv2.waitKey(1)
# Press 'c' to Capture Snapshot
if key == ord("c"):
cv2.imwrite(os.path.join(u.IMAGE_PATH, "Snapshot_{}.png".format(count)), frame)
print("Captured Snapshot - {}".format(count))
count += 1
# Press 'q' to Quit
if key == ord("q"):
break
u.breaker()
# Release the capture object and destroy all windows
cap.release()
cv2.destroyAllWindows()
```
#### File: References and Tests/Capture/cli.py
```python
import os
import cv2
import sys
import platform
from termcolor import colored
# ******************************************************************************************************************** #
# Setting up self-aware Image Capture Directory
SAVE_PATH = os.path.join(os.getcwd(), "Captures")
if not os.path.exists(SAVE_PATH):
os.makedirs(SAVE_PATH)
# Custom Print Function
def myprint(text, color, on_color=None):
print(colored(text, color=color, on_color=on_color))
# Linebreaker
def breaker(num=50, char="*"):
print(colored("\n" + num*char + "\n", color="cyan"))
# Initialize the capture object
def init_video(ID, w, h):
if platform.system() != "Windows":
cap = cv2.VideoCapture(ID)
else:
cap = cv2.VideoCapture(ID, cv2.CAP_DSHOW)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, w)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, h)
cap.set(cv2.CAP_PROP_FPS, 30)
return cap
# ******************************************************************************************************************** #
# Function that captures an image from the webcam feed
def image_capture(CaptureObject):
"""
CaptureObject: OpenCV VideoCapture Object that has been initialized
"""
count = 1
breaker()
# Read data from capture object
while CaptureObject.isOpened():
_, frame = CaptureObject.read()
# Display the frame first so the window exists before polling for keys
cv2.imshow("Webcam Feed", frame)
# Read the key once per loop; calling cv2.waitKey() twice can drop keypresses
key = cv2.waitKey(1)
# Press 'c' to Capture frame
if key == ord("c"):
cv2.imwrite(os.path.join(SAVE_PATH, "Snapshot_{}.png".format(count)), frame)
print("Captured Snapshot - {}".format(count))
count += 1
# Press 'q' to Quit
if key == ord("q"):
break
breaker()
# Release the capture object and destroy all windows
CaptureObject.release()
cv2.destroyAllWindows()
# ******************************************************************************************************************** #
# Functionality to capture a snippet of the realtime webcam feed. Not Implemented
def video_capture(CaptureObject, number_of_frames):
pass
# ******************************************************************************************************************** #
"""
CLI Arguments:
1. --id : Device ID used for Video Capture (default: 0)
2. --w : Width of the Capture Frame (Default: 640)
3. --h : Height of the Capture Frame (Default: 360)
"""
def app():
args_1 = "--id"
args_2 = "--w"
args_3 = "--h"
# Default CLI Argument Values
device_id = 0
w = 640
h = 360
# CLI Argument Handling
if args_1 in sys.argv:
device_id = int(sys.argv[sys.argv.index(args_1) + 1])
if args_2 in sys.argv:
w = int(sys.argv[sys.argv.index(args_2) + 1])
if args_3 in sys.argv:
h = int(sys.argv[sys.argv.index(args_3) + 1])
cap = init_video(device_id, w, h)
image_capture(cap)
# ******************************************************************************************************************** #
```
#### File: References and Tests/Capture/gui.py
```python
import os
import sys
import cv2
import platform
import tkinter as tk
from PIL import Image, ImageTk
# Webcam Canvas Attributes
CAM_WIDTH, CAM_HEIGHT = 640, 360
# ******************************************************************************************************************** #
# Setting up self-aware Image Capture Directory
SAVE_PATH = os.path.join(os.getcwd(), "Captures")
if not os.path.exists(SAVE_PATH):
os.makedirs(SAVE_PATH)
# ******************************************************************************************************************** #
class Video(object):
def __init__(self, device_id=None, width=None, height=None, fps=30):
"""
device_id: (int) Device ID of the capture device (can be set via command line)
width: (int) Width of the Capture Frame
height: (int) Height of the Capture Frame
fps: (int) FPS of the Capture Object
"""
self.device_id = device_id
self.width = width
self.height = height
self.fps = fps
self.cap = None
# Initialize the capture object
def start(self):
if platform.system() != "Windows":
self.cap = cv2.VideoCapture(self.device_id)
else:
self.cap = cv2.VideoCapture(self.device_id, cv2.CAP_DSHOW)
self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, self.width)
self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, self.height)
self.cap.set(cv2.CAP_PROP_FPS, self.fps)
# Releases the capture object
def stop(self):
if self.cap.isOpened():
self.cap.release()
# Gets a frame from the capture object
def get_frame(self):
if self.cap.isOpened():
ret, frame = self.cap.read()
if ret:
return ret, cv2.cvtColor(src=frame, code=cv2.COLOR_BGR2RGB)
else:
return ret, None
# ******************************************************************************************************************** #
# Tkinter Frame that handles the Video Feed
class VideoFrame(tk.Frame):
def __init__(self, master, V=None, w=None, h=None, *args, **kwargs):
"""
master: master widget upon which this works
V: Video Capture Object
w: Width of the Canvas Used
h: height of the Canvas Used
"""
tk.Frame.__init__(self, master, *args, **kwargs)
self.master = master
self.V = V
self.image = None
self.canvas = tk.Canvas(self, width=w, height=h, background="black")
self.canvas.pack()
self.delay = 15
self.id = None
# Function to update the canvas every 15 ms
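# (Note: this shadows tkinter's built-in Misc.update(); renaming it, e.g. to
# `refresh`, would be safer if the built-in is ever needed.)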
def update(self):
ret, frame = self.V.get_frame()
if ret:
self.image = ImageTk.PhotoImage(Image.fromarray(frame))
self.canvas.create_image(0, 0, anchor="nw", image=self.image)
self.id = self.after(self.delay, self.update)
# Function to start the Video Capture
def start(self):
self.update()
# Function to stop the Video Capture
def stop(self):
if self.id:
self.after_cancel(self.id)
self.id = None
# ******************************************************************************************************************** #
# Tkinter Frame that handles the Buttons
class ActionFrame(tk.Frame):
def __init__(self, master, V=None, path=None, *args, **kwargs):
tk.Frame.__init__(self, master, *args, **kwargs)
"""
master: master widget upon which this works
V: Video Capture Object
path: Path to which to save the image capture
"""
self.master = master
self.V = V
self.path = path
self.count = 1
button_height, button_width = 2, 20
# Capture Button Setup
self.CaptureButton = tk.Button(self, text="Capture", background="#00E8FF", activebackground="#86F4FF", foreground="black",
width=button_width, height=button_height, relief="raised", command=self.capture)
self.CaptureButton.grid(row=0, column=0)
# Quit Button Setup
self.QuitButton = tk.Button(self, text="Quit", background="#FF0000", activebackground="#FCAEAE", foreground="black",
width=button_width, height=button_height, relief="raised", command=self.do_quit)
self.QuitButton.grid(row=0, column=1)
# Capture Button Callback
def capture(self):
ret, frame = self.V.get_frame()
if ret:
cv2.imwrite(os.path.join(self.path, "Snapshot_{}.png".format(self.count)), cv2.cvtColor(src=frame, code=cv2.COLOR_BGR2RGB))
self.count += 1
# Quit Button Callback
def do_quit(self):
self.master.destroy()
# ******************************************************************************************************************** #
# Wrapper for all the Tkinter Frames
class Main(object):
def __init__(self, master, id=None, w=None, h=None, path=None, num_of_frames=None):
V = Video(id, w, h)
V.start()
VideoWidget = VideoFrame(master, V=V, w=w, h=h)
VideoWidget.pack()
VideoWidget.start()
ActionWidget = ActionFrame(master, V=V, path=path)
ActionWidget.pack()
# ******************************************************************************************************************** #
"""
CLI Arguments:
1. --id : Device ID used for Video Capture (default: 0)
2. --w : Width of the Capture Frame (Default: 640)
3. --h : Height of the Capture Frame (Default: 360)
"""
def app():
args_1 = "--id"
args_2 = "--w"
args_3 = "--h"
# Default CLI argument values
device_id = 0
w = 640
h = 360
# CLI Argument Handling
if args_1 in sys.argv:
device_id = int(sys.argv[sys.argv.index(args_1) + 1])
if args_2 in sys.argv:
w = int(sys.argv[sys.argv.index(args_2) + 1])
if args_3 in sys.argv:
h = int(sys.argv[sys.argv.index(args_3) + 1])
# Open a new Tkinter Window
root = tk.Tk()
# Setting up the root window size
ww, wh = int(1.05*w), int(1.175*h)
root.geometry("{}x{}".format(ww, wh))
# Setting up the root window title
root.title("Capture Application")
# Calling the Application Wrapper
Main(root, device_id, w, h, SAVE_PATH)
# Start
root.mainloop()
# ******************************************************************************************************************** #
```
#### File: References and Tests/Patch Pattern Recognition/cli.py
```python
import os
import cv2
import sys
import utils as u
from Snapshot import capture_snapshot
from Processor import process_patches_in_video
# ******************************************************************************************************************** #
def app():
args_1 = "--part-name"
args_2 = "--filename"
args_3 = "--capture"
args_4 = "--process"
args_5 = "--similarity"
# Default CLI Argument Values
do_capture, do_process = None, None
similarity = 0.8
# CLI Argument Handling
if args_1 in sys.argv:
p_name = sys.argv[sys.argv.index(args_1) + 1]
if args_2 in sys.argv:
f_name = sys.argv[sys.argv.index(args_2) + 1]
if args_3 in sys.argv:
do_capture = True
if args_4 in sys.argv:
do_process = True
if args_5 in sys.argv:
similarity = float(sys.argv[sys.argv.index(args_5) + 1])
u.breaker()
u.myprint("--- Application Start ---", color="green")
# Runs if --capture is specified
if do_capture:
capture_snapshot(part_name=p_name)
# Runs if --process is specified
if do_process:
path = os.path.join(u.IMAGE_PATH, p_name)
patch = u.preprocess(cv2.imread(os.path.join(path, f_name), cv2.IMREAD_COLOR))
process_patches_in_video(patch, similarity)
u.myprint("\n--- Application End ---", color="green")
u.breaker()
# ******************************************************************************************************************** #
```
#### File: References and Tests/Patch Processing/cli.py
```python
import sys
import utils as u
from Processor import process_video_as_patches
# ******************************************************************************************************************** #
def app():
args_1 = "--pw"
args_2 = "--ph"
args_3 = "--test"
# Default CLI Arguments
pw, ph, test = 48, 48, None
# CLI Argument Handling
if args_1 in sys.argv:
pw = int(sys.argv[sys.argv.index(args_1) + 1])
if args_2 in sys.argv:
ph = int(sys.argv[sys.argv.index(args_2) + 1])
if args_3 in sys.argv:
test = int(sys.argv[sys.argv.index(args_3) + 1])
u.breaker()
u.myprint("--- Application Start ---", color="green")
process_video_as_patches(pw, ph, test=test)
u.myprint("\n--- Application End ---", color="green")
u.breaker()
# ******************************************************************************************************************** #
```
#### File: References and Tests/Tkinter Videos/ButtonFrame.py
```python
import tkinter as tk
# Tkinter Frame that handles the Buttons
class ButtonFrame(tk.Frame):
def __init__(self, master, VideoWidget=None, *args, **kwargs):
tk.Frame.__init__(self, master, *args, **kwargs)
"""
master: master widget upon which this works
VideoWidget: Video Capture Frame
"""
self.master = master
self.VideoWidget = VideoWidget
self.button_height = 2
self.button_width = 20
# Start Button Setup
self.startButton = tk.Button(self, text="Start",
width=self.button_width, height=self.button_height,
background="#23EF13", activebackground="#9AF592", foreground="black",
relief="raised", command=self.do_start)
self.startButton.grid(row=0, column=0)
# Stop Button Setup
self.stopButton = tk.Button(self, text="Stop",
width=self.button_width, height=self.button_height,
background="#FFC500", activebackground="#FFE99E", foreground="black",
relief="raised", command=self.do_stop)
self.stopButton.grid(row=0, column=1)
# Quit Button Setup
self.quitButton = tk.Button(self, text="Quit",
width=self.button_width, height=self.button_height,
background="red", activebackground="#FCAEAE", foreground="black",
relief="raised", command=self.do_quit)
self.quitButton.grid(row=0, column=2)
# Start Button Callback
def do_start(self):
self.VideoWidget.start()
# Stop Button Callback
def do_stop(self):
self.VideoWidget.stop()
# Quit Button Callback
def do_quit(self):
self.master.master.destroy()
```
{
"source": "123raji/MY-APPLICATION-STORE-REPO",
"score": 3
}
#### File: 123raji/MY-APPLICATION-STORE-REPO/SQS-LAMBDA-CloudWatchLogs.py
```python
import json
def lambda_handler(event, context):
for record in event['Records']:
print ("Lambda")
payload=record["body"]
print(str(payload))
```
#### File: 123raji/MY-APPLICATION-STORE-REPO/SQS-SNS-LAMBDA.py
```python
import json
import boto3
import os
def lambda_handler(event, context):
batch_processes=[]
for record in event['Records']:
send_request(record["body"])
def send_request(body):
# Create an SNS client
sns = boto3.client('sns')
# Publish a simple message to the specified SNS topic
response = sns.publish(
TopicArn="arn:aws:sns:us-west-2:464599248654:JJTech-Test-Delete",
Message=body,
)
# Print out the response
print(response)
```
{
"source": "123seven/fastgm",
"score": 2
}
#### File: 123seven/fastgm/setup.py
```python
import os
from sys import version_info
from setuptools import setup, Extension, find_packages
try:
from Cython.Build import cythonize
except ImportError:
cythonize = None
if version_info[0] == 2:
from io import open
# https://cython.readthedocs.io/en/latest/src/userguide/source_files_and_compilation.html#distributing-cython-modules
def no_cythonize(extensions, **_ignore):
for extension in extensions:
sources = []
for sfile in extension.sources:
path, ext = os.path.splitext(sfile)
if ext in (".pyx", ".py"):
if extension.language == "c++":
ext = ".cpp"
else:
ext = ".c"
sfile = path + ext
sources.append(sfile)
extension.sources[:] = sources
return extensions
extensions = [
Extension(
'fastgm.sm4',
sources=['src/fastgm/sm4.pyx'],
),
Extension(
'fastgm.sm3',
sources=['src/fastgm/sm3.pyx'],
)
]
CYTHONIZE = bool(int(os.getenv("CYTHONIZE", 0))) and cythonize is not None
if CYTHONIZE:
compiler_directives = {"embedsignature": True}
extensions = cythonize(extensions, compiler_directives=compiler_directives)
else:
extensions = no_cythonize(extensions)
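# With CYTHONIZE=1 (and Cython installed) the .pyx sources are compiled to C at
# build time; otherwise pre-generated .c/.cpp files are expected to exist alongside them.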
setup(
name="fastgm",
version="0.3.1", # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
author="wptoux",
author_email="<EMAIL>",
description="Fast GMSSL Library for Python",
long_description=open("README.md", "r", encoding="utf-8").read(),
long_description_content_type="text/markdown",
keywords="国密 GM GMSSL SM2 SM3 SM4 Cython",
license="Apache",
url="https://github.com/wptoux/fastgm",
zip_safe=False,
package_dir={"": "src"},
packages=find_packages("src"),
ext_modules=extensions,
options={"bdist_wheel": {"universal": True}},
)
```
{
"source": "123seven/ruia",
"score": 3
}
#### File: examples/hacker_news_spider/hn2mongo.py
```python
from ruia_motor import RuiaMotorInsert, init_spider
from ruia import AttrField, Item, Spider, TextField
class HackerNewsItem(Item):
target_item = TextField(css_select="tr.athing")
title = TextField(css_select="a.storylink")
url = AttrField(css_select="a.storylink", attr="href")
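# target_item marks the repeating node: each matched `tr.athing` row yields one
# HackerNewsItem whose fields are extracted relative to that row.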
class HackerNewsSpider(Spider):
start_urls = [f"https://news.ycombinator.com/news?p={index}" for index in range(3)]
concurrency = 3
# Set a proxy for the spider's aiohttp requests
aiohttp_kwargs = {"proxy": "http://0.0.0.0:1087"}
async def parse(self, response):
async for item in HackerNewsItem.get_items(html=await response.text()):
yield RuiaMotorInsert(collection="news", data=item.results)
async def init_plugins_after_start(spider_ins):
spider_ins.mongodb_config = {"host": "127.0.0.1", "port": 27017, "db": "ruia_motor"}
init_spider(spider_ins=spider_ins)
if __name__ == "__main__":
HackerNewsSpider.start(after_start=init_plugins_after_start)
```
{
"source": "123seven/wecaht_tools",
"score": 2
}
#### File: wecaht_tools/wechat/exceptions.py
```python
from __future__ import absolute_import, unicode_literals
class WeChatException(Exception):
"""Base exception for wechat"""
def __init__(self, errcode, errmsg):
"""
:param errcode: Error code
:param errmsg: Error message
"""
self.errcode = errcode or 40000
self.errmsg = errmsg
def __str__(self):
_repr = {
'retCode': self.errcode,
'retMsg': self.errmsg
}
# __str__ must return a str; returning the dict itself raises TypeError
return str(_repr)
def __repr__(self):
_repr = {
'Class': self.__class__.__name__,
'retCode': self.errcode,
'retMsg': self.errmsg
}
return str(_repr)
class WeChatClientException(WeChatException):
"""WeChat API client exception class"""
def __init__(self, errcode, errmsg, client=None, request=None, response=None):
super(WeChatClientException, self).__init__(errcode, errmsg)
self.request = request
self.response = response
class InvalidSignatureException(WeChatException):
"""Invalid signature exception class"""
def __init__(self, errcode=-40001, errmsg='Invalid signature'):
super(InvalidSignatureException, self).__init__(errcode, errmsg)
class APILimitedException(WeChatClientException):
"""WeChat API call limited exception class"""
pass
class InvalidAppIdException(WeChatException):
"""Invalid app_id exception class"""
def __init__(self, errcode=-40005, errmsg='Invalid AppId'):
super(InvalidAppIdException, self).__init__(errcode, errmsg)
class WeChatOAuthException(WeChatClientException):
"""WeChat OAuth API exception class"""
pass
class WeChatComponentOAuthException(WeChatClientException):
"""WeChat Component OAuth API exception class"""
pass
```
{
"source": "123swk123/esp-idf",
"score": 2
}
#### File: lwip/weekend_test/net_suite_test.py
```python
import re
import os
import sys
import socket
from threading import Thread, Event
import subprocess
import time
from shutil import copyfile
try:
import IDF
except ImportError:
# this is a test case written with tiny-test-fw.
# to run test cases outside tiny-test-fw,
# we need to set the environment variable `TEST_FW_PATH`,
# then get and insert `TEST_FW_PATH` into sys.path before importing FW modules
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
import DUT
import Utility
stop_sock_listener = Event()
stop_io_listener = Event()
sock = None
client_address = None
manual_test = False
def io_listener(dut1):
global sock
global client_address
data = b''
while not stop_io_listener.is_set():
try:
data = dut1.expect(re.compile(r"PacketOut:\[([a-fA-F0-9]+)\]"), timeout=5)
except DUT.ExpectTimeout:
continue
if data != () and data[0] != b'':
packet_data = data[0]
print("Packet_data>{}<".format(packet_data))
response = bytearray.fromhex(packet_data.decode())
print("Sending to socket:")
packet = ' '.join(format(x, '02x') for x in bytearray(response))
print("Packet>{}<".format(packet))
if client_address is not None:
sock.sendto(response, ('127.0.0.1', 7777))
def sock_listener(dut1):
global sock
global client_address
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(5)
server_address = '0.0.0.0'
server_port = 7771
server = (server_address, server_port)
sock.bind(server)
try:
while not stop_sock_listener.is_set():
try:
payload, client_address = sock.recvfrom(1024)
packet = ' '.join(format(x, '02x') for x in bytearray(payload))
print("Received from address {}, data {}".format(client_address, packet))
dut1.write(str.encode(packet))
except socket.timeout:
pass
finally:
sock.close()
sock = None
@IDF.idf_example_test(env_tag="Example_WIFI")
def lwip_test_suite(env, extra_data):
global stop_io_listener
global stop_sock_listener
"""
steps: |
1. Rebuilds test suite with esp32_netsuite.ttcn
2. Starts listeners on stdout and socket
3. Execute ttcn3 test suite
4. Collect result from ttcn3
"""
dut1 = env.get_dut("net_suite", "examples/system/network_tests")
# check and log bin size
binary_file = os.path.join(dut1.app.binary_path, "net_suite.bin")
bin_size = os.path.getsize(binary_file)
IDF.log_performance("net_suite", "{}KB".format(bin_size // 1024))
IDF.check_performance("net_suite", bin_size // 1024)
dut1.start_app()
thread1 = Thread(target=sock_listener, args=(dut1, ))
thread2 = Thread(target=io_listener, args=(dut1, ))
if not manual_test:
# Variables referring to esp32 ttcn test suite
TTCN_SRC = 'esp32_netsuite.ttcn'
TTCN_CFG = 'esp32_netsuite.cfg'
# System Paths
netsuite_path = os.getenv("NETSUITE_PATH")
netsuite_src_path = os.path.join(netsuite_path, "src")
test_dir = os.path.dirname(os.path.realpath(__file__))
# Building the suite
print("Rebuilding the test suite")
print("-------------------------")
# copy esp32 specific files to ttcn net-suite dir
copyfile(os.path.join(test_dir, TTCN_SRC), os.path.join(netsuite_src_path, TTCN_SRC))
copyfile(os.path.join(test_dir, TTCN_CFG), os.path.join(netsuite_src_path, TTCN_CFG))
proc = subprocess.Popen(['bash', '-c', 'cd ' + netsuite_src_path + ' && source make.sh'],
cwd=netsuite_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = proc.stdout.read()
print("Note: First build step we expect failure (titan/net_suite build system not suitable for multijob make)")
print(output)
proc = subprocess.Popen(['bash', '-c', 'cd ' + netsuite_src_path + ' && make'],
cwd=netsuite_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print("Note: This time all dependencies shall be generated -- multijob make shall pass")
output = proc.stdout.read()
print(output)
# Executing the test suite
thread1.start()
thread2.start()
time.sleep(2)
print("Executing the test suite")
print("------------------------")
proc = subprocess.Popen(['ttcn3_start', os.path.join(netsuite_src_path,'test_suite'), os.path.join(netsuite_src_path, TTCN_CFG)],
stdout=subprocess.PIPE)
output = proc.stdout.read()
print(output)
print("Collecting results")
print("------------------")
verdict_stats = re.search('(Verdict statistics:.*)', output)
if verdict_stats:
verdict_stats = verdict_stats.group(1)
else:
verdict_stats = b""
verdict = re.search('Overall verdict: pass', output)
if verdict:
print("Test passed!")
Utility.console_log(verdict_stats, "green")
else:
Utility.console_log(verdict_stats, "red")
raise ValueError('Test failed with: {}'.format(verdict_stats))
else:
try:
# Executing the test suite
thread1.start()
thread2.start()
time.sleep(2)
while True:
time.sleep(0.5)
except KeyboardInterrupt:
pass
print("Executing done, waiting for tests to finish")
print("-------------------------------------------")
stop_io_listener.set()
stop_sock_listener.set()
thread1.join()
thread2.join()
if __name__ == '__main__':
print("Manual execution, please build and start ttcn in a separate console")
manual_test = True
lwip_test_suite()
```
#### File: nimble/blehr/blehr_test.py
```python
from __future__ import print_function
import os
import sys
import re
import threading
import traceback
try:
import Queue
except ImportError:
import queue as Queue  # Python 3
import subprocess
try:
# This environment variable is expected on the host machine
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
import IDF
except ImportError as e:
print(e)
print("\nCheck your IDF_PATH\nOR")
print("Try `export TEST_FW_PATH=$IDF_PATH/tools/tiny-test-fw` for resolving the issue\nOR")
print("Try `pip install -r $IDF_PATH/tools/tiny-test-fw/requirements.txt` for resolving the issue\n")
import IDF
try:
import lib_ble_client
except ImportError:
lib_ble_client_path = os.getenv("IDF_PATH") + "/tools/ble"
if lib_ble_client_path and lib_ble_client_path not in sys.path:
sys.path.insert(0, lib_ble_client_path)
import lib_ble_client
import Utility
# When running on local machine execute the following before running this script
# > make app bootloader
# > make print_flash_cmd | tail -n 1 > build/download.config
# > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw
def blehr_client_task(hr_obj, dut_addr):
interface = 'hci0'
ble_devname = 'blehr_sensor_1.0'
hr_srv_uuid = '180d'
hr_char_uuid = '2a37'
# Get BLE client module
ble_client_obj = lib_ble_client.BLE_Bluez_Client(interface, devname=ble_devname, devaddr=dut_addr)
if not ble_client_obj:
raise RuntimeError("Failed to get DBus-Bluez object")
# Discover Bluetooth Adapter and power on
is_adapter_set = ble_client_obj.set_adapter()
if not is_adapter_set:
raise RuntimeError("Adapter Power On failed !!")
# Connect BLE Device
is_connected = ble_client_obj.connect()
if not is_connected:
# Call disconnect to perform cleanup operations before exiting application
ble_client_obj.disconnect()
raise RuntimeError("Connection to device " + str(ble_devname) + " failed !!")
# Read Services
services_ret = ble_client_obj.get_services()
if services_ret:
Utility.console_log("\nServices\n")
Utility.console_log(str(services_ret))
else:
ble_client_obj.disconnect()
raise RuntimeError("Failure: Read Services failed")
'''
Blehr application run:
Start Notifications
Retrieve updated value
Stop Notifications
'''
blehr_ret = ble_client_obj.hr_update_simulation(hr_srv_uuid, hr_char_uuid)
if blehr_ret:
Utility.console_log("Success: blehr example test passed")
else:
raise RuntimeError("Failure: blehr example test failed")
# Call disconnect to perform cleanup operations before exiting application
ble_client_obj.disconnect()
class BleHRThread(threading.Thread):
def __init__(self, dut_addr, exceptions_queue):
threading.Thread.__init__(self)
self.dut_addr = dut_addr
self.exceptions_queue = exceptions_queue
def run(self):
try:
blehr_client_task(self, self.dut_addr)
except Exception:
self.exceptions_queue.put(traceback.format_exc(), block=False)
@IDF.idf_example_test(env_tag="Example_WIFI_BT")
def test_example_app_ble_hr(env, extra_data):
"""
Steps:
1. Discover Bluetooth Adapter and Power On
2. Connect BLE Device
3. Start Notifications
4. Updated value is retrieved
5. Stop Notifications
"""
subprocess.check_output(['rm','-rf','/var/lib/bluetooth/*'])
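# Note: without shell=True the '*' is passed to rm literally, so the call above
# may not actually remove the directory contents.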
subprocess.check_output(['hciconfig','hci0','reset'])
# Acquire DUT
dut = env.get_dut("blehr", "examples/bluetooth/nimble/blehr")
# Get binary file
binary_file = os.path.join(dut.app.binary_path, "blehr.bin")
bin_size = os.path.getsize(binary_file)
IDF.log_performance("blehr_bin_size", "{}KB".format(bin_size // 1024))
IDF.check_performance("blehr_bin_size", bin_size // 1024)
# Upload binary and start testing
Utility.console_log("Starting blehr simple example test app")
dut.start_app()
dut.reset()
# Get device address from dut
dut_addr = dut.expect(re.compile(r"Device Address: ([a-fA-F0-9:]+)"), timeout=30)[0]
exceptions_queue = Queue.Queue()
# Starting a py-client in a separate thread
blehr_thread_obj = BleHRThread(dut_addr, exceptions_queue)
blehr_thread_obj.start()
blehr_thread_obj.join()
exception_msg = None
while True:
try:
exception_msg = exceptions_queue.get(block=False)
except Queue.Empty:
break
else:
Utility.console_log("\n" + exception_msg)
if exception_msg:
raise Exception("Thread did not run successfully")
# Check dut responses
dut.expect("subscribe event; cur_notify=1", timeout=30)
dut.expect("subscribe event; cur_notify=0", timeout=30)
dut.expect("disconnect;", timeout=30)
if __name__ == '__main__':
test_example_app_ble_hr()
```
#### File: esp_app_trace/espytrace/apptrace.py
```python
import os
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
import threading
import tempfile
import time
import subprocess
import os.path
import elftools.elf.elffile as elffile
import elftools.elf.constants as elfconst
def addr2line(toolchain, elf_path, addr):
"""
Creates trace reader.
Parameters
----------
toolchain : string
toolchain prefix to retrieve source line locations using addresses
elf_path : string
path to ELF file to use
addr : int
address to retrieve source line location
Returns
-------
string
source line location string
"""
try:
return subprocess.check_output(['%saddr2line' % toolchain, '-e', elf_path, '0x%x' % addr]).decode("utf-8")
except subprocess.CalledProcessError:
return ''
class ParseError(RuntimeError):
"""
Parse error exception
"""
def __init__(self, message):
RuntimeError.__init__(self, message)
class ReaderError(RuntimeError):
"""
Trace reader error exception
"""
def __init__(self, message):
RuntimeError.__init__(self, message)
class ReaderTimeoutError(ReaderError):
"""
Trace reader timeout error
"""
def __init__(self, tmo, sz):
ReaderError.__init__(self, 'Timeout %f sec while reading %d bytes!' % (tmo, sz))
class ReaderShutdownRequest(ReaderError):
"""
Trace reader shutdown request error
Raised when user presses CTRL+C (SIGINT).
"""
def __init__(self):
ReaderError.__init__(self, 'Shutdown request!')
class Reader:
"""
Base abstract reader class
"""
def __init__(self, tmo):
"""
Constructor
Parameters
----------
tmo : int
read timeout
"""
self.timeout = tmo
self.need_stop = False
def read(self, sz):
"""
Reads a number of bytes
Parameters
----------
sz : int
number of bytes to read
Returns
-------
bytes object
read bytes
Raises
------
ReaderTimeoutError
if timeout expires
ReaderShutdownRequest
if SIGINT was received during reading
"""
pass
def readline(self):
"""
Reads line
Parameters
----------
sz : int
number of bytes to read
Returns
-------
string
read line
"""
pass
def forward(self, sz):
"""
Moves read pointer to a number of bytes
Parameters
----------
sz : int
number of bytes to read
"""
pass
def cleanup(self):
"""
Cleans up reader
"""
self.need_stop = True
class FileReader(Reader):
"""
File reader class
"""
def __init__(self, path, tmo):
"""
Constructor
Parameters
----------
path : string
path to file to read
tmo : int
see Reader.__init__()
"""
Reader.__init__(self, tmo)
self.trace_file_path = path
self.trace_file = open(path, 'rb')
def read(self, sz):
"""
see Reader.read()
"""
data = b''
start_tm = time.clock()
while not self.need_stop:
data += self.trace_file.read(sz - len(data))
if len(data) == sz:
break
if self.timeout != -1 and time.clock() >= start_tm + self.timeout:
raise ReaderTimeoutError(self.timeout, sz)
if self.need_stop:
raise ReaderShutdownRequest()
return data
def get_pos(self):
"""
Retrieves current file read position
Returns
-------
int
read position
"""
return self.trace_file.tell()
def readline(self, linesep=os.linesep):
"""
see Reader.readline()
"""
line = ''
start_tm = time.clock()
while not self.need_stop:
line += self.trace_file.readline().decode("utf-8")
if line.endswith(linesep):
break
if self.timeout != -1 and time.clock() >= start_tm + self.timeout:
raise ReaderTimeoutError(self.timeout, 1)
if self.need_stop:
raise ReaderShutdownRequest()
return line
def forward(self, sz):
"""
see Reader.forward()
"""
cur_pos = self.trace_file.tell()
start_tm = time.clock()
while not self.need_stop:
file_sz = os.path.getsize(self.trace_file_path)
if file_sz - cur_pos >= sz:
break
if self.timeout != -1 and time.clock() >= start_tm + self.timeout:
raise ReaderTimeoutError(self.timeout, sz)
if self.need_stop:
raise ReaderShutdownRequest()
self.trace_file.seek(sz, os.SEEK_CUR)
class NetRequestHandler:
"""
Handler for incoming network requests (connections, datagrams)
"""
def handle(self):
while not self.server.need_stop:
data = self.rfile.read(1024)
if len(data) == 0:
break
self.server.wtrace.write(data)
self.server.wtrace.flush()
class NetReader(FileReader):
"""
Base network socket reader class
"""
def __init__(self, tmo):
"""
see Reader.__init__()
"""
fhnd,fname = tempfile.mkstemp()
FileReader.__init__(self, fname, tmo)
self.wtrace = os.fdopen(fhnd, 'wb')
self.server_thread = threading.Thread(target=self.serve_forever)
self.server_thread.start()
def cleanup(self):
"""
see Reader.cleanup()
"""
FileReader.cleanup(self)
self.shutdown()
self.server_close()
self.server_thread.join()
time.sleep(0.1)
self.trace_file.close()
self.wtrace.close()
class TCPRequestHandler(NetRequestHandler, SocketServer.StreamRequestHandler):
"""
Handler for incoming TCP connections
"""
pass
class TCPReader(NetReader, SocketServer.TCPServer):
"""
TCP socket reader class
"""
def __init__(self, host, port, tmo):
"""
Constructor
Parameters
----------
host : string
see SocketServer.BaseServer.__init__()
port : int
see SocketServer.BaseServer.__init__()
tmo : int
see Reader.__init__()
"""
SocketServer.TCPServer.__init__(self, (host, port), TCPRequestHandler)
NetReader.__init__(self, tmo)
class UDPRequestHandler(NetRequestHandler, SocketServer.DatagramRequestHandler):
"""
Handler for incoming UDP datagrams
"""
pass
class UDPReader(NetReader, SocketServer.UDPServer):
"""
UDP socket reader class
"""
def __init__(self, host, port, tmo):
"""
Constructor
Parameters
----------
host : string
see SocketServer.BaseServer.__init__()
port : int
see SocketServer.BaseServer.__init__()
tmo : int
see Reader.__init__()
"""
SocketServer.UDPServer.__init__(self, (host, port), UDPRequestHandler)
NetReader.__init__(self, tmo)
def reader_create(trc_src, tmo):
"""
Creates trace reader.
Parameters
----------
trc_src : string
trace source URL. Supports 'file:///path/to/file' or (tcp|udp)://host:port
tmo : int
read timeout
Returns
-------
Reader
reader object or None if URL scheme is not supported
"""
url = urlparse(trc_src)
if len(url.scheme) == 0 or url.scheme == 'file':
if os.name == 'nt':
# workaround for Windows path
return FileReader(trc_src[7:], tmo)
else:
return FileReader(url.path, tmo)
if url.scheme == 'tcp':
return TCPReader(url.hostname, url.port, tmo)
if url.scheme == 'udp':
return UDPReader(url.hostname, url.port, tmo)
return None
class TraceDataProcessor:
"""
Base abstract class for all trace data processors.
"""
def __init__(self, print_events, keep_all_events=False):
"""
Constructor.
Parameters
----------
print_events : bool
if True every event will be printed as they arrive
keep_all_events : bool
if True all events will be kept in self.events in the order they arrive
"""
self.print_events = print_events
self.keep_all_events = keep_all_events
self.total_events = 0
self.events = []
# This can be changed by the root processor that includes several sub-processors.
# It is used to access methods of the root processor, which can hold methods/data common to all sub-processors.
# Common info could be the current execution context, info about running tasks, available IRQs etc.
self.root_proc = self
def _print_event(self, event):
"""
Base method to print an event.
Parameters
----------
event : object
Event object
"""
print("EVENT[{:d}]: {}".format(self.total_events, event))
def print_report(self):
"""
Base method to print report.
"""
print("Processed {:d} events".format(self.total_events))
def cleanup(self):
"""
Base method to make cleanups.
"""
pass
def on_new_event(self, event):
"""
Base method to process event.
"""
if self.print_events:
self._print_event(event)
if self.keep_all_events:
self.events.append(event)
self.total_events += 1
class LogTraceParseError(ParseError):
"""
Log trace parse error exception.
"""
pass
def get_str_from_elf(felf, str_addr):
"""
Retrieves string from ELF file.
Parameters
----------
felf : elffile.ELFFile
open ELF file handle to retrieve the string from
str_addr : int
address of the string
Returns
-------
string
string or None if it was not found
"""
tgt_str = ''
for sect in felf.iter_sections():
if sect['sh_addr'] == 0 or (sect['sh_flags'] & elfconst.SH_FLAGS.SHF_ALLOC) == 0:
continue
if str_addr < sect['sh_addr'] or str_addr >= sect['sh_addr'] + sect['sh_size']:
continue
sec_data = sect.data()
for i in range(str_addr - sect['sh_addr'], sect['sh_size']):
if type(sec_data) is str:
ch = sec_data[i]
else:
ch = str(chr(sec_data[i]))
if ch == '\0':
break
tgt_str += ch
if len(tgt_str) > 0:
return tgt_str
return None
class LogTraceEvent:
"""
Log trace event.
"""
def __init__(self, fmt_addr, log_args):
"""
Constructor.
Parameters
----------
fmt_addr : int
address of the format string
log_args : list
list of log message arguments
"""
self.fmt_addr = fmt_addr
self.args = log_args
def get_message(self, felf):
"""
Retrieves log message.
Parameters
----------
felf : elffile.ELFFile
open ELF file handle to retrieve the format string from
Returns
-------
string
formatted log message
Raises
------
LogTraceParseError
if format string has not been found in ELF file
"""
fmt_str = get_str_from_elf(felf, self.fmt_addr)
if not fmt_str:
raise LogTraceParseError('Failed to find format string for 0x%x' % self.fmt_addr)
prcnt_idx = 0
for i, arg in enumerate(self.args):
prcnt_idx = fmt_str.find('%', prcnt_idx, -2) # TODO: check str ending with %
if prcnt_idx == -1:
break
prcnt_idx += 1 # goto next char
if fmt_str[prcnt_idx] == 's':
# find string
arg_str = get_str_from_elf(felf, self.args[i])
if arg_str:
self.args[i] = arg_str
else:
self.args[i] = '<None>'
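# C-style '%p' has no Python printf equivalent, so pointer args are rendered as hex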
fmt_str = fmt_str.replace('%p', '%x')
return fmt_str % tuple(self.args)
class BaseLogTraceDataProcessorImpl:
"""
Base implementation for log data processors.
"""
def __init__(self, print_log_events=False, elf_path=''):
"""
Constructor.
Parameters
----------
print_log_events : bool
if True every log event will be printed as they arrive
elf_path : string
path to ELF file to retrieve format strings for log messages
"""
if len(elf_path):
self.felf = elffile.ELFFile(open(elf_path, 'rb'))
else:
self.felf = None
self.print_log_events = print_log_events
self.messages = []
def cleanup(self):
"""
Cleanup
"""
if self.felf:
self.felf.stream.close()
def print_report(self):
"""
Prints log report
"""
print("=============== LOG TRACE REPORT ===============")
print("Processed {:d} log messages.".format(len(self.messages)))
def on_new_event(self, event):
"""
Processes log events.
Parameters
----------
event : LogTraceEvent
Event object.
"""
msg = event.get_message(self.felf)
self.messages.append(msg)
if self.print_log_events:
print(msg),
class HeapTraceParseError(ParseError):
"""
Heap trace parse error exception.
"""
pass
class HeapTraceDuplicateAllocError(HeapTraceParseError):
"""
Heap trace duplicate allocation error exception.
"""
def __init__(self, addr, new_size, prev_size):
"""
Constructor.
Parameters
----------
addr : int
memory block address
new_size : int
size of the new allocation
prev_size : int
size of the previous allocation
"""
HeapTraceParseError.__init__(self, """Duplicate alloc @ 0x{:x}!
New alloc is {:d} bytes,
previous is {:d} bytes.""".format(addr, new_size, prev_size))
class HeapTraceEvent:
"""
Heap trace event.
"""
def __init__(self, ctx_name, in_irq, core_id, ts, alloc, size, addr, callers, toolchain='', elf_path=''):
"""
Constructor.
Parameters
----------
ctx_name : string
name of event context (task or IRQ name)
in_irq : bool
True if event has been generated in IRQ context, otherwise False
core_id : int
core which generated the event
ts : float
event timestamp
alloc : bool
True for allocation event, otherwise False
size : int
size of allocation; has no meaning for de-allocation event
addr : int
address of allocation/de-allocation
callers : list
list of callers (callstack) for event
toolchain : string
toolchain prefix to retrieve source line locations using addresses
elf_path : string
path to ELF file used to resolve source line locations
"""
self.ctx_name = ctx_name
self.in_irq = in_irq
self.core_id = core_id
self.ts = ts
self.alloc = alloc
self.size = size
self.addr = addr
self.callers = callers
self.toolchain = toolchain
self.elf_path = elf_path
def __repr__(self):
if len(self.toolchain) and len(self.elf_path):
callers = os.linesep
for addr in self.callers:
callers += '{}'.format(addr2line(self.toolchain, self.elf_path, addr))
else:
callers = ''
for addr in self.callers:
if len(callers):
callers += ':'
callers += '0x{:x}'.format(addr)
if self.in_irq:
ctx_desc = 'IRQ "%s"' % self.ctx_name
else:
ctx_desc = 'task "%s"' % self.ctx_name
if self.alloc:
return "[{:.9f}] HEAP: Allocated {:d} bytes @ 0x{:x} from {} on core {:d} by: {}".format(self.ts, self.size,
self.addr, ctx_desc,
self.core_id, callers)
else:
return "[{:.9f}] HEAP: Freed bytes @ 0x{:x} from {} on core {:d} by: {}".format(self.ts, self.addr, ctx_desc,
self.core_id, callers)
class BaseHeapTraceDataProcessorImpl:
"""
Base implementation for heap data processors.
"""
def __init__(self, print_heap_events=False):
"""
Constructor.
Parameters
----------
print_heap_events : bool
if True every heap event will be printed as they arrive
"""
self._alloc_addrs = {}
self.allocs = []
self.frees = []
self.heap_events_count = 0
self.print_heap_events = print_heap_events
def on_new_event(self, event):
"""
Processes heap events. Keeps track of active allocations list.
Parameters
----------
event : HeapTraceEvent
Event object.
"""
self.heap_events_count += 1
if self.print_heap_events:
print(event)
if event.alloc:
if event.addr in self._alloc_addrs:
raise HeapTraceDuplicateAllocError(event.addr, event.size, self._alloc_addrs[event.addr].size)
self.allocs.append(event)
self._alloc_addrs[event.addr] = event
else:
# do not treat frees of unknown addresses as errors, because these blocks could have been allocated while tracing was disabled
if event.addr in self._alloc_addrs:
event.size = self._alloc_addrs[event.addr].size
self.allocs.remove(self._alloc_addrs[event.addr])
del self._alloc_addrs[event.addr]
else:
self.frees.append(event)
def print_report(self):
"""
Prints heap report
"""
print("=============== HEAP TRACE REPORT ===============")
print("Processed {:d} heap events.".format(self.heap_events_count))
if len(self.allocs) == 0:
print("OK - Heap errors was not found.")
return
leaked_bytes = 0
for alloc in self.allocs:
leaked_bytes += alloc.size
print(alloc)
for free in self.frees:
if free.addr > alloc.addr and free.addr <= alloc.addr + alloc.size:
print("Possible wrong free operation found")
print(free)
print("Found {:d} leaked bytes in {:d} blocks.".format(leaked_bytes, len(self.allocs)))
```
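The processor above is self-contained enough to exercise directly. A minimal sketch, assuming `HeapTraceEvent` and `BaseHeapTraceDataProcessorImpl` from the file above are in scope; all event values below are invented for illustration:
```python
# Hypothetical smoke test: two allocations, one matching free, one leak.
proc = BaseHeapTraceDataProcessorImpl(print_heap_events=True)

common = dict(ctx_name='main', in_irq=False, core_id=0, callers=[0x400d1234])
proc.on_new_event(HeapTraceEvent(ts=0.1, alloc=True, size=32, addr=0x3ffb0000, **common))
proc.on_new_event(HeapTraceEvent(ts=0.2, alloc=True, size=64, addr=0x3ffb0100, **common))
# a free removes the matching block from the active allocation list
proc.on_new_event(HeapTraceEvent(ts=0.3, alloc=False, size=0, addr=0x3ffb0000, **common))

proc.print_report()  # reports the remaining 64-byte block as leaked
```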
#### File: tools/test_idf_py/idf_ext.py
```python
def action_extensions(base_actions, project_path=None):
def echo(name, *args, **kwargs):
print(name, args, kwargs)
# Add global options
extensions = {
"global_options": [
{
"names": ["--test-0"],
"help": "Non-deprecated option.",
"deprecated": False
},
{
"names": ["--test-1"],
"help": "Deprecated option 1.",
"deprecated": True
},
{
"names": ["--test-2"],
"help": "Deprecated option 2.",
"deprecated": "Please update your parameters."
},
{
"names": ["--test-3"],
"help": "Deprecated option 3.",
"deprecated": {
"custom_message": "Please update your parameters."
}
},
{
"names": ["--test-4"],
"help": "Deprecated option 3.",
"deprecated": {
"since": "v4.0",
"removed": "v5.0"
}
},
],
"actions": {
"test-0": {
"callback":
echo,
"help":
"Non-deprecated command 0",
"options": [
{
"names": ["--test-sub-0"],
"help": "Non-deprecated subcommand option 0",
"default": None,
},
{
"names": ["--test-sub-1"],
"help": "Deprecated subcommand option 1",
"default": None,
"deprecated": True
},
],
"arguments": [{
"names": ["test-arg-0"],
}],
},
"test-1": {
"callback": echo,
"help": "Deprecated command 1",
"deprecated": "Please use alternative command."
},
},
}
return extensions
``` |
{
"source": "123tarunanand/PokemonTrading",
"score": 2
} |
#### File: 123tarunanand/PokemonTrading/app.py
```python
import hashlib
import json
from time import time,sleep
from uuid import uuid4
import pandas as pd
from flask import Flask,jsonify,request,render_template,redirect,url_for,flash
import random
import pokemon
from blockchain import Blockchain
import requests
import ast
import sys
import threading
if len(sys.argv) == 1:
    print("Nodetracker and port number not passed")
    exit()
if len(sys.argv) == 2:
    print("Port number not passed")
    exit()
# Instantiate our Node
app = Flask(__name__,template_folder='templates')
node = str('http://0.0.0.0:' + sys.argv[2]+ '/node')
# Instantiate the Blockchain
blockchain = Blockchain(node)
pokedex = pd.read_csv('Kanto.csv',index_col='Nat')
def pokemonname(index):
return pokedex['Pokemon'][index]
def regnode():
sleep(1)
#Let tracker know about presence of this node
print("Node registration attempted")
url=str('http://0.0.0.0:' + sys.argv[1]+ '/nodes')
payload={'Node':str('http://0.0.0.0:'+ sys.argv[2]+'/node')}
requests.post(url,data=json.dumps(payload))
print("Node registration completed")
@app.route('/',methods=['GET','POST'])
def start():
if request.method == 'GET':
if blockchain.user:
return redirect(url_for('home'))
return render_template('./home.html')
if blockchain.user:
return redirect(url_for('home'))
username = request.form['user']
if username.isspace() or not username:
return render_template('./home.html')
blockchain.user = username
print("User initiated")
return redirect(url_for('home'))
@app.route('/home')
def home():
pokes=[]
pokenum = blockchain.own_pokes()
for k in pokenum:
pokes.append({'name':pokemonname(k),'id':str(str(k)+'.png')})
return render_template('./main.html',name=blockchain.user,pokes=pokes)
@app.route('/node',methods=['POST'])
def register_node():
data = ast.literal_eval((request.data).decode('utf-8'))
print("New node received-")
print(data['Node'])
blockchain.nodereg(data['Node'])
if data['New'] == 'False':
bk = requests.get(str(data['Node']+"/chain")).json()
while(blockchain.currentmining):
pass
if bk['length'] > len(blockchain.chain):
blockchain.chain = bk['chain']
print("Consensus chain received")
return jsonify("Received")
@app.route('/mine', methods=['GET'])
def mine():
#if len(blockchain.current_trade) == 0:
# return jsonify("No transactions to mine"),200
blockchain.currentmining = True
ctrade = blockchain.current_trade
present = {}
for node in blockchain.nodes:
present[node]= blockchain.other_pokes(node)
present[blockchain.node]=blockchain.own_pokes()
for trade in ctrade:
if (int(trade['sentby1']) in present[trade['trainer1']]) and (int(trade['sentby2']) in present[trade['trainer2']]):
present[trade['trainer1']].append(int(trade['sentby2']))
present[trade['trainer2']].append(int(trade['sentby1']))
present[trade['trainer1']].remove(int(trade['sentby1']))
present[trade['trainer2']].remove(int(trade['sentby2']))
else:
if str(trade['trainer1'])==str((blockchain.node)):
treqs=blockchain.tradereqs
for k in treqs:
if str(k['node']) == str(trade['trainer2']) and str(k['timestamp']) == str(trade['time']):
k['status']="Invalid"
break
blockchain.tradereqs=treqs
print(treqs)
else:
url = str(trade['trainer1']+'/trade/offerresp')
payload={'node':trade['trainer2'],'sent':trade['sentby1'],'rec':trade['sentby2'],'tim':trade['time'],'status':'Invalid'}
requests.post(url,data=json.dumps(payload))
ctrade.remove(trade)
print("Invalid trade removed")
    blockchain.current_trade = ctrade
last_block = blockchain.last_block
last_proof = last_block['proof']
proof = blockchain.proof_of_work(last_proof)
previous_hash = blockchain.hash(last_block)
block = blockchain.new_block(proof,previous_hash)
blockchain.currentmining = False
print("Mining complete")
payload = json.dumps({'chain':blockchain.chain,'length':len(blockchain.chain)})
for node in blockchain.nodes:
url = str(node+'/sync')
requests.post(url,data=payload)
return redirect(url_for('home'))
@app.route('/trade')
def starttrade():
pokes=[]
pokenum = blockchain.own_pokes()
for k in pokenum:
pokes.append({'name':pokemonname(k),'id':str(str(k)+'.png'),'num':k})
return render_template('./trade.html',name=blockchain.user,pokes=pokes)
@app.route('/trade/<param>')
def choosetrain(param):
nodes = blockchain.nodes
response=[]
param = int(param)
for n in nodes:
otherpokes = blockchain.other_pokes(n)
for i in otherpokes:
j = pokemonname(i)
response.append({'name':j,'id':(str(i)+".png"),'num':i,'trainer':n})
name = pokemonname(param)
cur = param
param = str(param)+'.png'
return render_template('./tradereq.html',name=name,id=param,pokes=response,cur=cur)
@app.route('/trade/outg',methods=['GET','POST'])
def tradereq():
if request.method == 'GET':
treq = blockchain.tradereqs
pokes=[]
chain = blockchain.chain
for p in treq:
k={}
i= pokemonname(p['rec'])
j=pokemonname(p['sent'])
k['rec'] = i
k['sent'] = j
k['node'] =p['node']
if (p['status'] == 'Accepted but Unverified'):
for block in blockchain.chain:
for trade in block['trade']:
if trade['trainer1'] == blockchain.node and str(p['timestamp']) == str(trade['time']):
p['status'] = 'Verified'
k['status'] = p['status']
pokes.append(k)
blockchain.tradereqs=treq
return render_template('./tradessent.html',pokes = pokes)
if request.method == 'POST':
timestamp=time()
blockchain.tradereqs.append({'node':request.form['node'],'sent':int(request.form['Sendpoke']),'rec':int(request.form['Rec']),'status':'No response yet','timestamp':timestamp})
payload={'node':blockchain.node,'sent':request.form['Sendpoke'],'rec':request.form['Rec'],'timestamp':timestamp}
url=str(request.form['node']+'/trade/off')
requests.post(url,data = json.dumps(payload))
print("Offer sent")
return redirect('/trade/outg')
@app.route('/trade/off')
def tradeoff():
tre = blockchain.offers
pokes=[]
for p in tre:
k={}
i=pokemonname(p['sent'])
j=pokemonname(p['rec'])
k['sent'] = i
k['rec'] = j
k['node']=p['node']
k['time']=p['timestamp']
k['sentid']=p['sent']
k['recid']=p['rec']
pokes.append(k)
return render_template('./tradeoffers.html',pokes=pokes)
@app.route('/node/trade/off',methods=['POST'])
def tradeoffrec():
if request.method == 'POST':
print("Offer received")
data = ast.literal_eval((request.data).decode('utf-8'))
blockchain.offers.append({'node':data['node'],'sent':int(data['sent']),'rec':int(data['rec']),'timestamp':data['timestamp']})
return jsonify("Received offer")
@app.route('/node/sync',methods=['POST'])
def sync():
data = ast.literal_eval((request.data).decode('utf-8'))
print("Received new block")
while(blockchain.currentmining):
pass
if data['length'] > len(blockchain.chain):
blockchain.chain = data['chain']
toff = blockchain.current_trade
for block in blockchain.chain:
for trade in block['trade']:
for t in toff:
if trade == t:
toff.remove(t)
print("Removed")
blockchain.current_trade = toff
return jsonify("Request complete"),200
@app.route('/trade/unc')
def returnv():
return jsonify(blockchain.current_trade)
@app.route('/node/trade',methods=['POST'])
def add_transaction():
values = ast.literal_eval((request.data).decode('utf-8'))
index = blockchain.new_transaction(values['trainer1'],values['trainer2'],values['sentby1'],values['sentby2'],values['time'])
return jsonify(blockchain.current_trade),200
@app.route('/trade/resp',methods=['POST'])
def traderesponse():
if request.form['submit'] == 'Accept':
offers = blockchain.offers
for o in offers:
if str(o['node']) == str(request.form['node']) and (str(o['sent']) == str(request.form['Sendpoke'])) and (str(o['rec']) == str(request.form['Rec'])) and (str(o['timestamp']) == str(request.form['tim'])):
offers.remove(o)
print("Offer accepted")
break
blockchain.offers = offers
url = str(request.form['node']+'/trade/offerresp')
payload={'node':blockchain.node,'sent':request.form['Sendpoke'],'rec':request.form['Rec'],'tim':request.form['tim'],'status':'Accepted but Unverified'}
requests.post(url,data=json.dumps(payload))
values={'trainer1':request.form['node'],'trainer2':blockchain.node,'sentby1':request.form['Sendpoke'],'sentby2':request.form['Rec'],'time':request.form['tim']}
index = blockchain.new_transaction(values['trainer1'],values['trainer2'],values['sentby1'],values['sentby2'],values['time'])
payload = json.dumps(values)
for node in blockchain.nodes:
url = str(node+'/trade')
requests.post(url,data=payload)
return redirect('/trade/off')
else:
offers = blockchain.offers
for o in offers:
if str(o['node']) == str(request.form['node']) and (str(o['sent']) == str(request.form['Sendpoke'])) and (str(o['rec']) == str(request.form['Rec'])) and (str(o['timestamp']) == str(request.form['tim'])):
offers.remove(o)
print("Offer removed")
break
blockchain.offers = offers
url = str(request.form['node']+'/trade/offerresp')
payload={'node':blockchain.node,'sent':request.form['Sendpoke'],'rec':request.form['Rec'],'tim':request.form['tim'],'status':'Cancelled'}
requests.post(url,data=json.dumps(payload))
return redirect('/trade/off')
@app.route('/node/trade/offerresp',methods=['POST'])
def offresp():
data = ast.literal_eval((request.data).decode('utf-8'))
treqs=blockchain.tradereqs
for k in treqs:
print(k)
if str(k['node']) == str(data['node']) and str(k['timestamp']) == str(data['tim']):
k['status']=str(data['status'])
break
blockchain.tradereqs=treqs
return jsonify("Received")
@app.route('/node/chain', methods=['GET'])
def full_chain():
response = {
'chain': blockchain.chain,
'length': len(blockchain.chain),
}
return jsonify(response), 200
if __name__ == '__main__':
port = int(sys.argv[2])
t1 = threading.Thread(target=regnode)
t1.start()
app.run(host='0.0.0.0', port=port)
```
#### File: 123tarunanand/PokemonTrading/blockchain.py
```python
import hashlib
import json
from time import time
from uuid import uuid4
import pandas as pd
import random
import pokemon
#Blockchain class
class Blockchain(object):
def __init__(self,node):
#Existing Blockchain
self.chain=[]
self.user = ""
#Current trades pending
self.current_trade = []
self.nodes = set()
self.node = node
self.currentmining = False
self.pokedex = pd.read_csv('Kanto.csv')['Rarity']
self.rew1=[]
self.rew2=[]
self.rew3=[]
self.rew4=[]
self.rew5=[]
i = 1
for p in self.pokedex:
if p == 1:
self.rew1.append(i)
if p == 2:
self.rew2.append(i)
if p == 3:
self.rew3.append(i)
if p == 4:
self.rew4.append(i)
if p == 5:
self.rew5.append(i)
i = i + 1
self.rew2 = self.rew1 + self.rew2
self.rew3 = self.rew3 + self.rew2
self.rew4 = self.rew4 + self.rew3
self.rew5 = self.rew5 + self.rew4
#Create genesis block
self.new_block(proof=0,previous_hash='0')
self.tradereqs=[]
self.offers = []
def own_pokes(self):
caughtpokes=[]
rem=[]
print("Own pokemon referenced")
for block in self.chain:
if block['miner'] == self.node:
caughtpokes.append(block['rew'])
for trade in block['trade']:
if trade['trainer1'] == self.node:
caughtpokes.append(int(trade['sentby2']))
rem.append(int(trade['sentby1']))
elif trade['trainer2'] == self.node:
caughtpokes.append(int(trade['sentby1']))
rem.append(int(trade['sentby2']))
for x in rem:
caughtpokes.remove(x)
print("Own pokemon received")
return caughtpokes
def other_pokes(self,nodel):
caughtpokes=[]
rem=[]
print("Other pokemon referenced")
for block in self.chain:
if block['miner'] == nodel:
caughtpokes.append(block['rew'])
for trade in block['trade']:
if trade['trainer1'] == nodel:
caughtpokes.append(int(trade['sentby2']))
rem.append(int(trade['sentby1']))
elif trade['trainer2'] == nodel:
caughtpokes.append(int(trade['sentby1']))
rem.append(int(trade['sentby2']))
for x in rem:
caughtpokes.remove(x)
print("Pokemon referenced")
return caughtpokes
def nodereg(self,node):
self.nodes.add(node)
def new_block(self,proof,previous_hash = None):
#Create a new block after mining in the blockchain
#previous_hash = None for the genesis block
trans = len(self.current_trade)
block = {
'index':len(self.chain) + 1,
'timestamp': time(),
'trade': self.current_trade,
'proof': proof,
'previous_hash': previous_hash or self.hash(self.chain[-1]),
'miner':self.node
}
self.current_trade=[]
if trans == 0:
rew = random.choice(self.rew1)
elif trans < 5:
rew = random.choice(self.rew2)
elif trans < 10:
rew = random.choice(self.rew3)
elif trans < 20:
rew = random.choice(self.rew4)
else:
rew = random.choice(self.rew5)
        block['rew'] = rew
self.chain.append(block)
print("Block mined and added to chain")
return block
def new_transaction(self,trainer1,trainer2,sentby1,sentby2,timesent):
#creates a new trade to go into next mined block
ctrade={
'trainer1': trainer1,
'trainer2': trainer2,
'sentby1': sentby1,
'sentby2' : sentby2,
'time' : timesent
}
self.current_trade.append(ctrade)
return self.last_block['index'] + 1
def proof_of_work(self,last_proof):
#Simple proof of work algorithm:
        proof = 0
print("Mining going on")
lastb = self.hash(self.chain[-1])
while self.valid_proof(last_proof,proof,lastb) is False:
proof +=1
print("Proofofwork found")
return proof
def valid_proof(self,last_proof,proof,lasthash):
guess = str(str(last_proof+proof)+str(lasthash)).encode()
guess_hash = hashlib.sha256(guess).hexdigest()
return guess_hash[:4] == '0000'
@staticmethod
def hash(block):
#Creates a SHA-256 hash of a block
block_string = json.dumps(block,sort_keys=True).encode()
return hashlib.sha256(block_string).hexdigest()
@property
def last_block(self):
# Returns the last Block in the chain
return self.chain[-1]
```
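For reference, the mining loop above can be reproduced standalone: concatenate the previous proof, a candidate proof, and the previous block hash, and accept when the SHA-256 digest starts with four zero hex digits. A minimal sketch of that check, mirroring `valid_proof`:
```python
import hashlib

def find_proof(last_proof: int, last_hash: str) -> int:
    """Brute-force the smallest proof accepted by Blockchain.valid_proof above."""
    proof = 0
    while True:
        guess = (str(last_proof + proof) + last_hash).encode()
        if hashlib.sha256(guess).hexdigest()[:4] == '0000':
            return proof
        proof += 1

# With a 4-hex-digit target, about 16**4 = 65536 guesses are expected on average.
print(find_proof(100, 'abc'))
```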
#### File: 123tarunanand/PokemonTrading/nodetracker.py
```python
import json
from time import sleep
import requests
from flask import Flask,jsonify,request,render_template,redirect
import ast
import sys
app = Flask(__name__)
nodes=set()
if len(sys.argv)!=2:
print("Port number not passed")
exit()
@app.route('/nodes',methods=['POST'])
def register_node():
data = ast.literal_eval((request.data).decode('utf-8'))
data = data['Node']
print(data)
for node in nodes:
requests.post(node,json.dumps({'Node':data,'New':'True'}))
requests.post(data,json.dumps({'Node':node,'New':'False'}))
nodes.add(data)
return jsonify("Received")
if __name__ == '__main__':
port = int(sys.argv[1])
app.run(host='0.0.0.0', port=port)
``` |
{
"source": "123weizheng/blog",
"score": 2
} |
#### File: blog/post/models.py
```python
from django.db import models
from user.models import User
class Post(models.Model):
uid = models.IntegerField()
title = models.CharField(max_length=64)
created = models.DateTimeField(auto_now_add=True)
content = models.TextField()
@property
def auth(self):
        '''Author of the post'''
if not hasattr(self, '_auth'):
self._auth = User.objects.get(id=self.uid)
return self._auth
def comments(self):
        '''All comments on the post'''
return Comment.objects.filter(post_id=self.id).order_by('-id')
def tags(self):
        '''All tags attached to the post'''
        relations = PostTagRelation.objects.filter(post_id=self.id).only('tag_id')  # fetch the post-tag relations
        tag_id_list = [r.tag_id for r in relations]  # collect the corresponding tag ids
        return Tag.objects.filter(id__in=tag_id_list)  # return the matching tags
def update_tags(self, tag_names):
        '''Update the tags attached to this post'''
updated_tags = set(Tag.ensure_tags(tag_names))
current_tags = set(self.tags())
        # tags that still need a relation created
need_create_tags = updated_tags - current_tags
need_create_tag_id_list = [t.id for t in need_create_tags]
PostTagRelation.add_relations(self.id, need_create_tag_id_list)
        # tags whose relation should be removed
need_delete_tags = current_tags - updated_tags
need_delete_tag_id_list = [t.id for t in need_delete_tags]
PostTagRelation.del_relations(self.id, need_delete_tag_id_list)
class Comment(models.Model):
uid = models.IntegerField()
post_id = models.IntegerField()
created = models.DateTimeField(auto_now_add=True)
content = models.TextField()
@property
def auth(self):
        '''Author of the comment'''
if not hasattr(self, '_auth'):
self._auth = User.objects.get(id=self.uid)
return self._auth
@property
def post(self):
        '''The post this comment belongs to'''
if not hasattr(self, '_post'):
self._post = Post.objects.get(id=self.post_id)
return self._post
class PostTagRelation(models.Model):
    '''
    Relation table between posts and tags. Example rows (post title, tag name):
        Load balancing with Nginx    nginx
        Load balancing with Nginx    linux
        Load balancing with Nginx    web
        Linux deployment             linux
        Linux deployment             nginx
        Linux deployment             django
        Python magic methods         python
    '''
post_id = models.IntegerField()
tag_id = models.IntegerField()
@classmethod
def add_relations(cls, post_id, tag_id_list):
        '''Create relations between a post id and the given tag ids'''
new_relations = [cls(post_id=post_id, tag_id=tid) for tid in tag_id_list]
cls.objects.bulk_create(new_relations)
@classmethod
def del_relations(cls, post_id, tag_id_list):
cls.objects.filter(post_id=post_id, tag_id__in=tag_id_list).delete()
class Tag(models.Model):
name = models.CharField(max_length=16, unique=True)
@classmethod
def ensure_tags(cls, tag_names):
        '''Ensure the given tags exist, creating any that are missing'''
        exists = cls.objects.filter(name__in=tag_names)  # tags that already exist
        exist_names = set(tag.name for tag in exists)  # names of the existing tags
        new_names = set(tag_names) - exist_names  # names of the tags still to create
        new_tags = [cls(name=n) for n in new_names]  # tag objects to create
        cls.objects.bulk_create(new_tags)  # create them in one batch
return cls.objects.filter(name__in=tag_names)
def posts(self):
        '''All posts attached to this tag'''
        relations = PostTagRelation.objects.filter(tag_id=self.id).only('post_id')  # fetch the tag-post relations
        post_id_list = [r.post_id for r in relations]  # collect the corresponding post ids
        return Post.objects.filter(id__in=post_id_list)  # return the matching posts
``` |
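The heart of `Post.update_tags` above is plain set arithmetic between the requested and current tag sets. A standalone sketch of that logic, with tag names standing in for `Tag` rows:
```python
current_tags = {"python", "django"}          # tags already linked to the post
updated_tags = {"python", "nginx", "linux"}  # tags requested by the editor

need_create = updated_tags - current_tags    # relations to add
need_delete = current_tags - updated_tags    # relations to remove

print(sorted(need_create))  # ['linux', 'nginx']
print(sorted(need_delete))  # ['django']
```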
{
"source": "123weizheng/python-script",
"score": 2
} |
#### File: python-script/PageSpeedInsights/psi.py
```python
import os
from googleapiclient.discovery import build
# API key, generated from the GCP Console Credentials page.
API_KEY = os.getenv('GCP_API_KEY')
# For local development, set up an HTTP proxy as needed.
HTTP = None
URL = "https://m.ctrip.com/webapp/flight/schedule/detail.html"
def run(url):
pagespeedonline = build(
serviceName = 'pagespeedonline',
version = 'v5',
http = HTTP,
developerKey = API_KEY
)
response = pagespeedonline.pagespeedapi().runpagespeed(url = url).execute()
print(response)
return ('OK', 200)
def run_http(request):
request_json = request.get_json()
try:
url = request_json['url']
return run(url)
except KeyError:
return ('', 400)
def run_pubsub(event, context):
import base64
pubsub_message = base64.urlsafe_b64decode(event['data']).decode('utf-8')
run(pubsub_message)
return 'OK'
def test_run_http():
from flask import Request
_request = Request.from_values(json = { "url": URL })
run_http(_request)
def test_run_pubsub():
import base64
event = { "data": base64.urlsafe_b64encode(URL.encode('utf-8'))}
context = None
run_pubsub(event, context)
if __name__ == "__main__":
import httplib2
HTTP = httplib2.Http(proxy_info = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_SOCKS5, '127.0.0.1', 1086))
test_run_http()
test_run_pubsub()
``` |
{
"source": "123weizheng/wenshu_utils",
"score": 3
} |
#### File: wenshu_utils/tests/test_wzws.py
```python
import unittest
import requests
from wenshu_utils.vl5x.args import Vl5x, Number, Guid
from wenshu_utils.wzws.decrypt import wzws_decrypt
class TestWZWS(unittest.TestCase):
def setUp(self):
self.error_msg = "请开启JavaScript并刷新该页"
self.session = requests.Session()
self.session.headers.update({
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36",
})
def tearDown(self):
self.session.close()
def test_list(self):
response = self.session.get("http://wenshu.court.gov.cn/list/list/")
text = response.content.decode()
if self.error_msg in text:
redirect_url1 = wzws_decrypt(text, url=response.url)
redirect_url2 = wzws_decrypt(text)
self.assertEqual(redirect_url1, redirect_url2)
_ = self.session.get(redirect_url1)
url = "http://wenshu.court.gov.cn/List/ListContent"
data = {
"Param": "关键词:合同",
"Index": 1,
"Page": 10,
"Order": "法院层级",
"Direction": "asc",
"vl5x": Vl5x(self.session.cookies["vjkl5"]),
"number": Number(),
"guid": Guid(),
}
response = self.session.post(url, data=data)
self.assertNotIn(self.error_msg, response.content.decode())
print(response.text)
def test_detail(self):
url = "http://wenshu.court.gov.cn/CreateContentJS/CreateContentJS.aspx"
params = {
"DocID": "13d4c01a-0734-4ec1-bbac-658f8bb8ec62",
}
response = self.session.get(url, params=params)
text = response.content.decode()
if self.error_msg in text:
redirect_url1 = wzws_decrypt(text, url=response.url)
redirect_url2 = wzws_decrypt(text)
self.assertEqual(redirect_url1, redirect_url2)
response = self.session.get(redirect_url1)
self.assertNotIn(self.error_msg, response.content.decode())
print(response.text)
if __name__ == '__main__':
unittest.main()
```
#### File: wenshu_utils/docid/decrypt.py
```python
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import unpad
from ._unzip import unzip
IV = b"abcd134556abcedf"
def decrypt_doc_id(doc_id: str, key: bytes) -> str:
result = unzip(doc_id)
for _ in range(2):
result = _decrypt(result, key)
return result.decode()
def _decrypt(data: bytes, key: bytes, iv: bytes = IV) -> bytes:
"""pycryptodomex库解密"""
cipher = AES.new(key=key, mode=AES.MODE_CBC, iv=iv)
plaintext = cipher.decrypt(bytes.fromhex(data.decode()))
result = unpad(plaintext, AES.block_size)
return result
def _decrypt2(data: bytes, key: bytes, iv: bytes = IV) -> bytes:
"""cryptography库解密"""
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.padding import PKCS7
from cryptography.hazmat.backends import default_backend
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
decryptor = cipher.decryptor()
decrypted = decryptor.update(bytes.fromhex(data.decode())) + decryptor.finalize()
pkcs7 = PKCS7(algorithms.AES.block_size)
unpadder = pkcs7.unpadder()
result = unpadder.update(decrypted) + unpadder.finalize()
return result
```
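A round-trip sketch of the scheme `_decrypt` undoes: PKCS7-pad, AES-CBC encrypt with the fixed IV above, and hex-encode. The key is a made-up 16-byte value for illustration, not the real DocID key, and `_decrypt` is assumed importable from the module above:
```python
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad

IV = b"abcd134556abcedf"
key = b"0123456789abcdef"  # hypothetical key, illustration only

plaintext = b"13d4c01a-0734-4ec1-bbac-658f8bb8ec62"
cipher = AES.new(key=key, mode=AES.MODE_CBC, iv=IV)
data = cipher.encrypt(pad(plaintext, AES.block_size)).hex().encode()

# _decrypt expects hex-encoded ciphertext bytes and returns the unpadded plaintext
assert _decrypt(data, key) == plaintext
```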
#### File: wenshu_utils/wzws/decrypt.py
```python
import base64
import re
from urllib import parse
import execjs
from execjs.runtime_names import Node
from lxml.etree import HTML
def wzws_decrypt(text: str, url: str = None) -> str:
"""
    :param text: response text of a blocked page containing "请开启JavaScript并刷新该页"
        ("please enable JavaScript and refresh this page")
    :param url: URL of the current request. If given, the redirect target is computed by a
        pure-Python reimplementation of the page's algorithm, which is much faster; if omitted,
        the page's JavaScript is evaluated with an external Node.js runtime, which is slower.
        Example url: http://wenshu.court.gov.cn/CreateContentJS/CreateContentJS.aspx?DocID=13d4c01a-0734-4ec1-bbac-658f8bb8ec62
    :return: the redirect URL; requesting it returns the wzws_cid cookie and the real response
"""
if url is None:
base_url = "http://wenshu.court.gov.cn"
custom_js = """
window = {};
document = {
createElement: () => ({ style: "", appendChild: () => ({}), submit: () => ({}) }),
body: { appendChild: obj => { window.location = obj.action } }
};
atob = str => Buffer.from(str, "base64").toString("binary");
get_location = () => window.location;
"""
html = HTML(text)
js = html.xpath("//script/text()")[0]
ctx = execjs.get(Node).compile(custom_js + js)
location = ctx.call("get_location")
redirect_url = parse.urljoin(base_url, location)
else:
prefix_url = "http://wenshu.court.gov.cn/WZWSRE"
parse_result = parse.urlparse(url)
request_path = (parse_result.path + "?" + parse_result.query) if parse_result.query else parse_result.path
encoded_path = base64.b64encode(request_path.encode()).decode()
question, factor = re.search(r'wzwsquestion="(.+?)".+wzwsfactor="(\d+)"', text).groups()
challenge = "WZWS_CONFIRM_PREFIX_LABEL{}".format(sum(ord(i) for i in question) * int(factor) + 111111)
query_params = "wzwschallenge={}".format(base64.b64encode(challenge.encode()).decode())
redirect_url = prefix_url + encoded_path + "?" + query_params
return redirect_url
``` |
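The challenge formula in the URL branch is easy to verify by hand. A worked sketch with a made-up `wzwsquestion`/`wzwsfactor` pair (real values are scraped from the blocked page's JavaScript):
```python
import base64

question, factor = "WZWS", "3"  # hypothetical values from the blocked page

# sum of character codes, scaled by the factor, plus the fixed offset
answer = sum(ord(c) for c in question) * int(factor) + 111111
# ord('W') + ord('Z') + ord('W') + ord('S') = 347, so answer = 347 * 3 + 111111 = 112152
challenge = "WZWS_CONFIRM_PREFIX_LABEL{}".format(answer)
query_params = "wzwschallenge={}".format(base64.b64encode(challenge.encode()).decode())
print(query_params)
```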
{
"source": "123zbt/PySODEvalToolkit",
"score": 2
} |
#### File: PySODEvalToolkit/tools/converter.py
```python
import argparse
import importlib.util
import os
import sys
from itertools import chain
import numpy as np
parser = argparse.ArgumentParser(
description="A useful and convenient tool to convert your .npy results into the table code in latex."
)
parser.add_argument(
"-i",
"--result-file",
required=True,
nargs="+",
action="extend",
help="The path of the *_metrics.npy file.",
)
parser.add_argument(
"-o", "--tex-file", required=True, type=str, help="The path of the exported tex file."
)
parser.add_argument(
"-c", "--config-file", type=str, help="The path of the customized config file."
)
parser.add_argument(
"--contain-table-env",
action="store_true",
help="Whether to containe the table env in the exported code.",
)
parser.add_argument(
"--transpose",
action="store_true",
help="Whether to transpose the table.",
)
args = parser.parse_args()
def update_dict(parent_dict, sub_dict):
for sub_k, sub_v in sub_dict.items():
if sub_k in parent_dict:
if sub_v is not None and isinstance(sub_v, dict):
update_dict(parent_dict=parent_dict[sub_k], sub_dict=sub_v)
continue
        parent_dict.update({sub_k: sub_v})
results = {}
for result_file in args.result_file:
result = np.load(file=result_file, allow_pickle=True).item()
update_dict(results, result)
impossible_up_bound = 1
impossible_down_bound = 0
# Load the data
dataset_names = sorted(list(results.keys()))
metric_names = ["SM", "wFm", "MAE", "adpF", "avgF", "maxF", "adpE", "avgE", "maxE"]
method_names = sorted(list(set(chain(*[list(results[n].keys()) for n in dataset_names]))))
if args.config_file is not None:
assert args.config_file.endswith(".py")
module_name = os.path.basename(args.config_file)
spec = importlib.util.spec_from_file_location(module_name, args.config_file)
module = importlib.util.module_from_spec(spec)
if module_name in sys.modules:
print(f"{module_name} has existed in sys.modules")
else:
sys.modules[module_name] = module
print(f"{module_name} is loaded.")
spec.loader.exec_module(module)
if "dataset_names" not in module.__dict__:
print(
"`dataset_names` doesnot be contained in your config file, so we use the default config."
)
else:
dataset_names = module.__dict__["dataset_names"]
if "metric_names" not in module.__dict__:
print(
"`metric_names` doesnot be contained in your config file, so we use the default config."
)
else:
metric_names = module.__dict__["metric_names"]
if "method_names" not in module.__dict__:
print(
"`method_names` doesnot be contained in your config file, so we use the default config."
)
else:
method_names = module.__dict__["method_names"]
print(
f"CONFIG INFORMATION:\n - DATASETS: {dataset_names}]\n - METRICS: {metric_names}\n - METHODS: {method_names}"
)
# Assemble the table
ori_columns = []
column_for_index = []
for dataset_idx, dataset_name in enumerate(dataset_names):
for metric_idx, metric_name in enumerate(metric_names):
        filled_value = (
            impossible_up_bound if metric_name.lower() == "mae" else impossible_down_bound
        )
        filled_dict = {k: filled_value for k in metric_names}
        ori_column = [
            results[dataset_name].get(method_name, filled_dict)[metric_name]
            for method_name in method_names
        ]
        # negate MAE-style columns (filled_value == 1) so larger is always better when ranking
        column_for_index.append([x * round(1 - filled_value * 2) for x in ori_column])
        ori_columns.append(ori_column)
style_templates = dict(
method_row_body="& {method_name}",
method_column_body=" {method_name}",
dataset_row_body="& \multicolumn{{{num_metrics}}}{{c}}{{\\textbf{{{dataset_name}}}}}",
dataset_column_body="\multirow{{-{num_metrics}}}{{*}}{{\\rotatebox{{90}}{{\\textbf{{{dataset_name}}}}}",
dataset_head=" ",
metric_body="& {metric_name}",
metric_row_head=" ",
metric_column_head="& ",
body=[
"& {{\color{{reda}} \\textbf{{{txt:.03f}}}}}", # top1
"& {{\color{{mygreen}} \\textbf{{{txt:.03f}}}}}", # top2
"& {{\color{{myblue}} \\textbf{{{txt:.03f}}}}}", # top3
"& {txt:.03f}", # other
],
)
# Rank values and apply styles
def replace_cell(ori_value, k):
if ori_value == impossible_up_bound or ori_value == impossible_down_bound:
new_value = "& "
else:
new_value = style_templates["body"][k].format(txt=ori_value)
return new_value
for col, ori_col in zip(column_for_index, ori_columns):
col_array = np.array(col).reshape(-1)
sorted_col_array = np.sort(np.unique(col_array), axis=-1)[-3:][::-1]
# [top1_idxes, top2_idxes, top3_idxes]
top_k_idxes = [np.argwhere(col_array == x).tolist() for x in sorted_col_array]
for k, idxes in enumerate(top_k_idxes):
for row_idx in idxes:
ori_col[row_idx[0]] = replace_cell(ori_col[row_idx[0]], k)
for idx, x in enumerate(ori_col):
if not isinstance(x, str):
ori_col[idx] = replace_cell(x, -1)
# Build the table header
num_datasets = len(dataset_names)
num_metrics = len(metric_names)
num_methods = len(method_names)
# Build the leading columns first, then assemble the leading rows
latex_table_head = []
latex_table_tail = []
if not args.transpose:
dataset_row = (
[style_templates["dataset_head"]]
+ [
style_templates["dataset_row_body"].format(num_metrics=num_metrics, dataset_name=x)
for x in dataset_names
]
+ [r"\\"]
)
metric_row = (
[style_templates["metric_row_head"]]
+ [style_templates["metric_body"].format(metric_name=x) for x in metric_names]
* num_datasets
+ [r"\\"]
)
additional_rows = [dataset_row, metric_row]
    # Build the first column
method_column = [
style_templates["method_column_body"].format(method_name=x) for x in method_names
]
additional_columns = [method_column]
columns = additional_columns + ori_columns
rows = [list(row) + [r"\\"] for row in zip(*columns)]
rows = additional_rows + rows
if args.contain_table_env:
column_style = "|".join([f"*{num_metrics}{{c}}"] * len(dataset_names))
latex_table_head = [
f"\\begin{{tabular}}{{l|{column_style}}}\n",
"\\toprule[2pt]",
]
else:
dataset_column = []
for x in dataset_names:
blank_cells = [" "] * (num_metrics - 1)
dataset_cell = [
style_templates["dataset_column_body"].format(num_metrics=num_metrics, dataset_name=x)
]
dataset_column.extend(blank_cells + dataset_cell)
metric_column = [
style_templates["metric_body"].format(metric_name=x) for x in metric_names
] * num_datasets
additional_columns = [dataset_column, metric_column]
method_row = (
[style_templates["dataset_head"], style_templates["metric_column_head"]]
+ [style_templates["method_row_body"].format(method_name=x) for x in method_names]
+ [r"\\"]
)
additional_rows = [method_row]
additional_columns = [list(x) for x in zip(*additional_columns)]
rows = [cells + row + [r"\\"] for cells, row in zip(additional_columns, ori_columns)]
rows = additional_rows + rows
if args.contain_table_env:
column_style = "".join([f"*{{{num_methods}}}{{c}}"])
latex_table_head = [
f"\\begin{{tabular}}{{cc|{column_style}}}\n",
"\\toprule[2pt]",
]
if args.contain_table_env:
latex_table_tail = [
"\\bottomrule[2pt]\n",
"\\end{tabular}",
]
rows = [latex_table_head] + rows + [latex_table_tail]
with open(args.tex_file, mode="w", encoding="utf-8") as f:
for row in rows:
f.write("".join(row) + "\n")
``` |
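The `-c/--config-file` option loads an ordinary Python module and reads up to three attributes from it. A minimal sketch of such a config file; the names below are placeholders, not from any real run:
```python
# my_config.py -- hypothetical file passed via `-c my_config.py`.
# Any attribute omitted here falls back to the defaults derived from the .npy results.
dataset_names = ["DUTS", "ECSSD"]
metric_names = ["SM", "MAE", "maxF"]
method_names = ["MethodA", "MethodB", "MethodC"]
```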
{
"source": "123zhangzq/HW_DPDP",
"score": 2
} |
#### File: src/utils/logging_engine.py
```python
import logging
import sys
class LoggingEngine:
def __init__(self, level="debug", contents=None, logger_name=None):
self.logging_level_dict = {
"debug": logging.DEBUG,
"info": logging.INFO,
"warning": logging.WARNING,
"error": logging.ERROR,
"critical": logging.CRITICAL
}
logging_level = self.logging_level_dict.get(level.lower(), logging.DEBUG)
if contents is None:
contents = ["asctime", "levelname", "funcName", "lineno", "message"]
if logger_name is None:
logger_name = 'logging_engine'
logging_fmt = "%(asctime)s [%(filename)-15s | %(lineno)d] %(levelname)s: %(message)s"
# logging_fmt = " - ".join([f"%({content})s" for content in contents])
logger = logging.getLogger(logger_name)
logger.setLevel(level=logging_level)
formatter = logging.Formatter(logging_fmt)
if not logger.handlers:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
logger.addHandler(handler)
self.logger = logger
self.logger_name = logger_name
self.handlers = {}
self.formatter = formatter
self.import_log_funcs()
def import_log_funcs(self):
log_funcs = ['debug', 'info', 'warning', 'error', 'critical', 'exception']
for func_name in log_funcs:
func = getattr(self.logger, func_name)
setattr(self, func_name, func)
def add_file_output(self, filename: str, level='info', mode="w"):
if filename not in self.handlers:
handler = logging.FileHandler(filename, mode=mode, encoding='UTF-8')
handler.setFormatter(self.formatter)
handler.setLevel(self.logging_level_dict.get(level.lower(), logging.DEBUG))
self.handlers[filename] = handler
self.logger.addHandler(handler)
def remove_file_handler(self, file_path):
if file_path in self.handlers:
self.logger.removeHandler(self.handlers.get(file_path))
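    # The no-op methods below are static placeholders (useful for IDEs and linters);
    # import_log_funcs() rebinds these names to the wrapped logger's real methods.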
def debug(self, msg: str):
pass
def info(self, msg: str):
pass
def warning(self, msg: str):
pass
def error(self, msg: str):
pass
def critical(self, msg: str):
pass
def exception(self, msg: str):
pass
logger = LoggingEngine(logger_name="glob_logging_engine",
level="info")
def test_log():
log = LoggingEngine(level="debug",
contents=["asctime", "levelname", "filename", "lineno", "funcName", "message"])
log.info("Hello World!")
``` |
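A short usage sketch of the engine above: the module-level `logger` writes to stdout, and file handlers can be attached per run and detached afterwards (the log path is illustrative):
```python
# Console logging via the shared instance defined above
logger.info("starting dispatch run")

# Attach a per-run file handler, then detach it when the run finishes
log_path = "run_0001.log"  # hypothetical path
logger.add_file_output(log_path, level="info", mode="w")
logger.warning("vehicle 3 is behind schedule")
logger.remove_file_handler(log_path)
```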
{
"source": "1244919208/Maverick-Theme-Galileo",
"score": 2
} |
#### File: 1244919208/Maverick-Theme-Galileo/utils.py
```python
import os
import json
from Maverick.Config import g_conf
from Maverick.Utils import unify_joinpath, safe_read, filterPlaceholders
translation = None
def tr(str, locale="english"):
"""translation support
translate str according to translation file
"""
global translation
if translation is None:
path = unify_joinpath(os.path.dirname(
__file__) + '/locale', g_conf.language+".json")
translation = json.loads(safe_read(path) or '{}')
return translation.get(str, str)
def build_links(links):
    fp = filterPlaceholders
    items = '<span class="separator">·</span>'.join(['<li><a class="no-style" title="%s" href="%s" target="_blank"><i class="%s"></i>%s</a></li>'
        % (fp(item['name']), fp(item['url']), fp(item['icon']), fp(item['name'])) for item in links])
    return '<ul>%s</ul>' % items
def build_navs(navs):
    fp = filterPlaceholders
    items = ['<li><a class="ga-highlight" href="%s" target="%s">%s</a></li>'
        % (fp(item['url']), fp(item['target']), fp(item['name'])) for item in navs]
    items.append(
        '<li><a href="#" target="_self" class="search-form-input ga-highlight">%s</a></li>' % tr('Search'))
    return '<ul>%s</ul>' % ('<span class="separator">·</span>'.join(items))
def filterPrefix(url: str):
"""replace prefix with `/`, to fix Valine view counting
"""
return url.replace(g_conf.site_prefix, "/")
``` |
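For reference, `tr` above is a plain dictionary lookup over a per-language JSON file, falling back to the key itself. A sketch of the expected file shape and the lookup it performs; the locale entry below is made up:
```python
# locale/chinese.json might look like: { "Search": "搜索" }
translation = {"Search": "搜索"}  # stand-in for the loaded JSON file

print(translation.get("Search", "Search"))      # -> 搜索
print(translation.get("Archives", "Archives"))  # -> Archives (no entry, falls back)
```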
{
"source": "12520054/pybot",
"score": 3
} |
#### File: chatterbot/logic/closest_meaning.py
```python
from .base_match import BaseMatchAdapter
class ClosestMeaningAdapter(BaseMatchAdapter):
"""
    This adapter selects a response by comparing the tokenized form of the
    input statement's text with the tokenized form of each possible matching
    statement. For each candidate, path similarities are computed over the
    Cartesian product of the two statements' synsets and summed, which
    approximates how close the statements are in meaning. The known statement
    with the greatest total path similarity is returned.
"""
def __init__(self, **kwargs):
super(ClosestMeaningAdapter, self).__init__(**kwargs)
from chatterbot.conversation.comparisons import synset_distance
self.compare_statements = kwargs.get(
'statement_comparison_function',
synset_distance
)
```
#### File: chatterbot/logic/mathematical_evaluation.py
```python
from __future__ import unicode_literals
from chatterbot.logic import LogicAdapter
from chatterbot.conversation import Statement
import re
import os
import json
import decimal
class MathematicalEvaluation(LogicAdapter):
"""
The MathematicalEvaluation logic adapter parses input to
determine whether the user is asking a question that requires
math to be done. If so, MathematicalEvaluation goes through a
set of steps to parse the input and extract the equation that
must be solved. The steps, in order, are:
1) Normalize input: Remove punctuation and other irrelevant data
2) Convert words to numbers
3) Extract the equation
4) Simplify the equation
5) Solve the equation & return result
"""
def __init__(self, **kwargs):
super(MathematicalEvaluation, self).__init__(**kwargs)
language = kwargs.get('math_words_language', 'english')
self.math_words = self.get_language_data(language)
def get_language_data(self, language):
"""
Load language-specific data
"""
from chatterbot.corpus import Corpus
corpus = Corpus()
math_words_data_file_path = corpus.get_file_path(
'chatterbot.corpus.{}.math_words'.format(language),
extension='json'
)
try:
with open(math_words_data_file_path) as data:
return json.load(data)
except IOError:
raise self.UnrecognizedLanguageException(
'A math_words data file was not found for `{}` at `{}`.'.format(
language, math_words_data_file_path
)
)
def can_process(self, statement):
"""
Determines whether it is appropriate for this
adapter to respond to the user input.
"""
confidence, response = self.process(statement)
return confidence == 1
def process(self, statement):
"""
Takes a statement string.
Returns the simplified statement string
with the mathematical terms "solved".
"""
input_text = statement.text
# Getting the mathematical terms within the input statement
expression = str(self.simplify_chunks(self.normalize(input_text)))
# Returning important information
try:
expression += "= " + str(eval(expression))
# return a confidence of 1 if the expression could be evaluated
return 1, Statement(expression)
        except Exception:
return 0, Statement(expression)
def simplify_chunks(self, input_text):
"""
Separates the incoming text.
"""
string = ''
for chunk in input_text.split():
is_chunk_integer = self.is_integer(chunk)
if is_chunk_integer is False:
is_chunk_float = self.is_float(chunk)
if is_chunk_float is False:
is_chunk_operator = self.is_operator(chunk)
if is_chunk_operator is not False:
string += str(is_chunk_operator) + ' '
else:
string += str(is_chunk_float) + ' '
else:
string += str(is_chunk_integer) + ' '
return string
def is_float(self, string):
"""
If the string is a float, returns
the float of the string. Otherwise,
it returns False.
"""
try:
return decimal.Decimal(string)
except decimal.DecimalException:
return False
def is_integer(self, string):
"""
If the string is an integer, returns
the int of the string. Otherwise,
it returns False.
"""
try:
return int(string)
        except (ValueError, TypeError):
return False
def is_operator(self, string):
"""
If the string is an operator, returns
        said operator. Otherwise, it returns False.
"""
if string in "+-/*^()":
return string
else:
return False
def normalize(self, string):
"""
Normalizes input text, reducing errors
and improper calculations.
"""
# If the string is empty, just return it
        if len(string) == 0:
return string
# Setting all words to lowercase
string = string.lower()
# Removing punctuation
if not string[-1].isalnum():
string = string[:-1]
# Removing words
string = self.substitute_words(string)
# Returning normalized text
return string
def substitute_words(self, string):
"""
Substitutes numbers for words.
"""
condensed_string = '_'.join(string.split())
for word in self.math_words["words"]:
condensed_string = re.sub(
'_'.join(word.split(' ')),
self.math_words["words"][word],
condensed_string
)
for number in self.math_words["numbers"]:
condensed_string = re.sub(
number,
str(self.math_words["numbers"][number]),
condensed_string
)
for scale in self.math_words["scales"]:
condensed_string = re.sub(
"_" + scale,
" " + self.math_words["scales"][scale],
condensed_string
)
condensed_string = condensed_string.split('_')
for chunk_index in range(0, len(condensed_string)):
value = ""
try:
value = str(eval(condensed_string[chunk_index]))
condensed_string[chunk_index] = value
        except Exception:
pass
for chunk_index in range(0, len(condensed_string)):
if self.is_integer(condensed_string[chunk_index]) or self.is_float(condensed_string[chunk_index]):
i = 1
start_index = chunk_index
end_index = -1
while (chunk_index + i < len(condensed_string) and (self.is_integer(condensed_string[chunk_index + i]) or self.is_float(condensed_string[chunk_index + i]))):
end_index = chunk_index + i
i += 1
for sub_chunk in range(start_index, end_index):
condensed_string[sub_chunk] += " +"
condensed_string[start_index] = "( " + condensed_string[start_index]
condensed_string[end_index] += " )"
return ' '.join(condensed_string)
class UnrecognizedLanguageException(Exception):
def __init__(self, value='The specified language was not recognized'):
self.value = value
def __str__(self):
return repr(self.value)
```
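The five steps in the class docstring can be traced on one input. A hypothetical walk-through, assuming an English `math_words` corpus that maps e.g. "four" to "4" and "plus" to "+" (the real mappings live in the corpus JSON, not shown here):
```python
# Hypothetical trace of MathematicalEvaluation.process on one statement:
# 1) normalize:        "What is four plus two?" -> "what is four plus two"
# 2) substitute_words: "what is four plus two"  -> "what is 4 + 2"
# 3) simplify_chunks:  drop non-math words      -> "4 + 2 "
# 4) evaluate and append the result:
expression = "4 + 2"
expression += "= " + str(eval(expression))
print(expression)  # -> "4 + 2= 6"
```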
#### File: chatterbot/logic/multi_adapter.py
```python
from __future__ import unicode_literals
from collections import Counter
from .logic_adapter import LogicAdapter
class MultiLogicAdapter(LogicAdapter):
"""
MultiLogicAdapter allows ChatterBot to use multiple logic
adapters. It has methods that allow ChatterBot to add an
adapter, set the chat bot, and process an input statement
to get a response.
"""
def __init__(self, **kwargs):
super(MultiLogicAdapter, self).__init__(**kwargs)
self.adapters = []
def process(self, statement):
"""
        Returns the output of a selection of logic adapters
for a given input statement.
:param statement: The input statement to be processed.
"""
results = []
result = None
max_confidence = -1
for adapter in self.adapters:
if adapter.can_process(statement):
confidence, output = adapter.process(statement)
results.append((confidence, output, ))
self.logger.info(
'{} selected "{}" as a response with a confidence of {}'.format(
str(adapter.__class__), output.text, confidence
)
)
if confidence > max_confidence:
result = output
max_confidence = confidence
else:
self.logger.info(
'Not processing the statement using {}'.format(
str(adapter.__class__)
)
)
# If multiple adapters agree on the same statement,
# then that statement is more likely to be the correct response
if len(results) >= 3:
statements = [s[1] for s in results]
count = Counter(statements)
most_common = count.most_common()
if most_common[0][1] > 1:
result = most_common[0][0]
max_confidence = self.get_greatest_confidence(result, results)
return max_confidence, result
def get_greatest_confidence(self, statement, options):
"""
Returns the greatest confidence value for a statement that occurs
multiple times in the set of options.
:param statement: A statement object.
:param options: A tuple in the format of (confidence, statement).
"""
values = []
for option in options:
if option[1] == statement:
values.append(option[0])
return max(values)
def add_adapter(self, adapter):
"""
Appends a logic adapter to the list of logic adapters being used.
:param adapter: The logic adapter to be added.
:type adapter: LogicAdapter
"""
self.adapters.append(adapter)
def set_chatbot(self, chatbot):
"""
Set the chatbot for each of the contained logic adapters.
"""
super(MultiLogicAdapter, self).set_chatbot(chatbot)
for adapter in self.adapters:
adapter.set_chatbot(chatbot)
```
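The tie-breaking in `get_greatest_confidence` is easy to check in isolation. A minimal sketch with plain strings standing in for statement objects; the helper simply mirrors the method above:
```python
def get_greatest_confidence(statement, options):
    """Standalone mirror of MultiLogicAdapter.get_greatest_confidence."""
    return max(conf for conf, stmt in options if stmt == statement)

# Two adapters agreed on "hello"; the higher confidence wins.
options = [(0.2, "hello"), (0.9, "hello"), (0.5, "hi")]
print(get_greatest_confidence("hello", options))  # -> 0.9
```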
#### File: chatterbot/output/microsoft.py
```python
from __future__ import unicode_literals
from .output_adapter import OutputAdapter
import requests
import json
class Microsoft(OutputAdapter):
"""
An output adapter that allows a ChatterBot instance to send
    responses to a Microsoft bot using the *Direct Line* client protocol.
"""
def __init__(self, **kwargs):
super(Microsoft, self).__init__(**kwargs)
self.directline_host = kwargs.get('directline_host',
'https://directline.botframework.com')
self.direct_line_token_or_secret = kwargs.get\
('direct_line_token_or_secret')
self.conversation_id = kwargs.get('conversation_id')
authorization_header = 'BotConnector {}'.\
format(self.direct_line_token_or_secret)
self.headers = {
'Authorization': authorization_header,
'Content-Type': 'application/json'
}
def _validate_status_code(self, response):
status_code = response.status_code
if status_code not in [200, 204]:
            raise self.HTTPStatusException('{} status code received'.
format(status_code))
def get_most_recent_message(self):
endpoint = '{host}/api/conversations/{id}/messages'\
.format(host=self.directline_host,
id=self.conversation_id)
response = requests.get(
endpoint,
headers=self.headers,
verify=False
)
self.logger.info('{} retrieving most recent messages {}'.format(
response.status_code, endpoint
))
self._validate_status_code(response)
data = response.json()
if data['messages']:
last_msg = int(data['watermark'])
return data['messages'][last_msg-1]
return None
def send_message(self, conversation_id, message):
"""
        Send a message to a Direct Line conversation.
"""
message_url = "{host}/api/conversations/{conversationId}/messages".\
format(host=self.directline_host, conversationId=conversation_id)
response = requests.post(
message_url,
headers=self.headers,
data=json.dumps({
'message': message
})
)
self.logger.info('{} sending message {}'.format(
response.status_code, message_url
))
self._validate_status_code(response)
        # Microsoft returns 204 when the operation succeeded and no content was returned.
return self.get_most_recent_message()
def process_response(self, statement, confidence=None):
data = self.send_message(self.conversation_id, statement.text)
self.logger.info('processing user response {}'.format(data))
return statement
class HTTPStatusException(Exception):
"""
Exception raised when unexpected non-success HTTP
status codes are returned in a response.
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
```
#### File: pybot/chatterbot/utils.py
```python
def clean_whitespace(text):
"""
Remove any extra whitespace and line breaks as needed.
"""
import re
# Replace linebreaks with spaces
text = text.replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
    # Remove any leading or trailing whitespace
text = text.strip()
# Remove consecutive spaces
text = re.sub(' +', ' ', text)
return text
def clean(text):
"""
A function for cleaning a string of text.
Returns valid ASCII characters.
"""
import unicodedata
import sys
text = clean_whitespace(text)
# Remove links from message
# text = re.sub(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', '', text)
# Replace HTML escape characters
if sys.version_info[0] < 3:
from HTMLParser import HTMLParser
parser = HTMLParser()
text = parser.unescape(text)
else:
import html
text = html.unescape(text)
    # Normalize unicode characters
    # ('unicode' only exists in Python 2; Python 3 strings are already unicode)
if sys.version_info[0] < 3:
text = unicode(text)
text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore').decode('utf-8')
return str(text)
def import_module(dotted_path):
"""
Imports the specified module based on the
dot notated import path for the module.
"""
import importlib
module_parts = dotted_path.split('.')
module_path = '.'.join(module_parts[:-1])
module = importlib.import_module(module_path)
return getattr(module, module_parts[-1])
def input_function():
"""
Normalizes reading input between python 2 and 3.
The function 'raw_input' becomes 'input' in Python 3.
"""
import sys
if sys.version_info[0] < 3:
user_input = str(raw_input())
# Avoid problems using format strings with unicode characters
if user_input:
user_input = user_input.decode('utf-8')
else:
user_input = input()
return user_input
def nltk_download_corpus(corpus_name):
"""
Download the specified NLTK corpus file
unless it has already been downloaded.
Returns True if the corpus needed to be downloaded.
"""
from nltk.data import find
from nltk import download
    # Download the corpus data only if it is not already downloaded
zip_file = '{}.zip'.format(corpus_name)
downloaded = False
try:
find(zip_file)
except LookupError:
download(corpus_name)
downloaded = True
return downloaded
def remove_stopwords(tokens, language):
"""
Takes a language (i.e. 'english'), and a set of word tokens.
Returns the tokenized text with any stopwords removed.
Stop words are words like "is, the, a, ..."
"""
from nltk.corpus import stopwords
# Get the stopwords for the specified language
stop_words = stopwords.words(language)
# Remove the stop words from the set of word tokens
tokens = set(tokens) - set(stop_words)
return tokens
``` |
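A quick illustration of `clean_whitespace` above on a messy string, assuming the function is importable:
```python
messy = "  Hello,\n\tworld!   This   is \r spaced.  "
print(clean_whitespace(messy))  # -> "Hello, world! This is spaced."
```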
{
"source": "1254517211/test-1",
"score": 3
} |
#### File: test-1/test/downloadTingWa.py
```python
import urllib.request
import os
import time
import datetime
def url_open(url):
req = urllib.request.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36')
page = urllib.request.urlopen(req)
html = page.read()
return html
def download(title,post,url):
filename = title + "." +post
with open(filename, 'wb') as f:
music = url_open(url)
f.write(music)
def download_mai():
start = 528
baseUrl = "http://www.itingwa.com/listen/"
while True:
now = datetime.datetime.now()
now = now.strftime('%Y-%m-%d %H:%M:%S')
print(now)
print(start)
url = baseUrl + str(start)
html = url_open(url).decode('utf-8')
a = html.find("页面未找到")
if a != -1:
time.sleep(3)
start = start + 1
continue
a = html.find("frame1")
a = html.find("<h1>", a) + 4
b = html.find("<a href", a)
title = str(start) + " - " + html[a:b].strip()
title = title.replace('*','-')
print(title)
a = html.find("<div id=\"tw_player\"", b)
a = html.find("http", a)
b = html.find("</div>", a) - 2
downUrl = html[a:b]
print(downUrl)
        post = downUrl[-3:]  # file extension, e.g. "mp3"
print(post)
try:
download(title,post,downUrl)
print("begin to sleep")
time.sleep(2)
except ConnectionResetError:
start = start - 1
start = start + 1
if __name__ == '__main__':
download_mai()
``` |
{
"source": "12564985/DeFMO",
"score": 3
} |
#### File: DeFMO/dataloaders/loader.py
```python
import torch
from torchvision import transforms
from PIL import Image
import os
import random
from main_settings import *
import pdb
class ShapeBlurDataset(torch.utils.data.Dataset):
def __init__(self, dataset_folder=g_dataset_folder, render_objs = g_render_objs, number_per_category=g_number_per_category, do_augment=False, use_latent_learning=g_use_latent_learning):
self.timestamps = torch.linspace(0,1,g_fmo_steps)
self.dataset_folder = dataset_folder
self.render_objs = render_objs
self.number_per_category = number_per_category
self.do_augment = do_augment
self.use_latent_learning = use_latent_learning
def __len__(self):
return len(self.render_objs)*self.number_per_category
def __getitem__(self, index):
# inputs, gt_paths = get_training_sample(render_objs=self.render_objs, max_obj=self.number_per_category, dataset_folder=self.dataset_folder)
        objname = index // self.number_per_category
objid = (index % self.number_per_category) + 1
inputs, gt_paths = get_training_sample(render_objs=[self.render_objs[objname]], min_obj=objid, max_obj=objid, dataset_folder=self.dataset_folder, use_latent_learning=self.use_latent_learning)
perm = torch.randperm(int(g_fmo_steps/2))
inds = perm[:int(g_fmo_train_steps/2)]
inds,_ = inds.sort()
inds = torch.cat((inds, (g_fmo_steps-1)-torch.flip(inds,[0])), 0)
times = self.timestamps[inds]
inds_left = perm[int(g_fmo_train_steps/2):]
inds_left = torch.cat((inds_left, (g_fmo_steps-1)-torch.flip(inds_left,[0])), 0)
times_left = self.timestamps[inds_left]
hs_frames = []
for ind in inds:
gt_batch = get_gt_sample(gt_paths, ind)
hs_frames.append(gt_batch)
hs_frames = torch.stack(hs_frames,0).contiguous()
if self.do_augment:
if random.random() > 0.5:
inputs = torch.flip(inputs, [-1])
hs_frames = torch.flip(hs_frames, [-1])
if random.random() > 0.5:
inputs = torch.flip(inputs, [-2])
hs_frames = torch.flip(hs_frames, [-2])
return inputs, times, hs_frames, times_left
def get_transform():
if g_normalize:
return transforms.Compose([transforms.ToTensor(),transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
else:
return transforms.ToTensor()
def get_training_sample(render_objs = g_render_objs, min_obj=1, max_obj=g_number_per_category, dataset_folder=g_dataset_folder, use_latent_learning=False):
gt_paths = []
while True:
obj = random.choice(render_objs)
times = random.randint(min_obj,max_obj)
filename = os.path.join(dataset_folder, obj, "{}_{:04d}.png".format(obj, times))
if g_use_median:
bgr_path = os.path.join(dataset_folder, obj, "GT", "{}_{:04d}".format(obj, times), "bgr_med.png")
if not g_use_median or not os.path.exists(bgr_path):
bgr_path = os.path.join(dataset_folder, obj, "GT", "{}_{:04d}".format(obj, times), "bgr.png")
if not os.path.exists(filename) or not os.path.exists(bgr_path):
print('Something does not exist: {} or {}'.format(filename, bgr_path))
continue
I = Image.open(filename)
B = Image.open(bgr_path)
preprocess = get_transform()
if use_latent_learning:
I2 = Image.open(os.path.join(dataset_folder, obj, "diffbgr", "{:04d}_im.png".format(times)))
if g_use_median:
B2 = Image.open(os.path.join(dataset_folder, obj, "diffbgr", "{:04d}_bgrmed.png".format(times)))
else:
B2 = Image.open(os.path.join(dataset_folder, obj, "diffbgr", "{:04d}_bgr.png".format(times)))
input_batch = torch.cat((preprocess(I), preprocess(B), preprocess(I2), preprocess(B2)), 0)
else:
input_batch = torch.cat((preprocess(I), preprocess(B)), 0)
for ki in range(g_fmo_steps):
gt_paths.append(os.path.join(dataset_folder, obj, "GT", "{}_{:04d}".format(obj, times), "image-{:06d}.png".format(ki+1)))
return input_batch, gt_paths
def get_gt_sample(gt_paths, ti):
GT = Image.open(gt_paths[ti])
preprocess = transforms.ToTensor()
gt_batch = preprocess(GT)
return gt_batch
def get_dataset_statistics(dataset_folder=g_dataset_folder):
nobj = 0
all_times = []
all_objs_max = []
for obj in g_render_objs:
times = 0
while True:
filename = os.path.join(dataset_folder, obj, "{}_{:04d}.png".format(obj, times+1))
bgr_path = os.path.join(dataset_folder, obj, "GT", "{}_{:04d}".format(obj, times+1), "bgr.png")
if not os.path.exists(filename) or not os.path.exists(bgr_path):
break
times += 1
print('Object {} has {} instances'.format(obj, times))
all_times.append(times)
if times > 0:
nobj += 1
if times == g_number_per_category:
all_objs_max.append(obj)
print('Number of objects {}'.format(len(g_render_objs)))
print('Number of non-zero objects {}'.format(nobj))
print(all_times)
print(all_objs_max)
print(len(all_objs_max))
```
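The timestamp sampling in `__getitem__` draws indices only from the first half of the range and mirrors them, so every sampled time t is paired with 1 - t. A small sketch of that construction, using hypothetical values in place of the `main_settings` globals:
```python
import torch

g_fmo_steps, g_fmo_train_steps = 8, 4  # hypothetical stand-ins for main_settings

perm = torch.randperm(g_fmo_steps // 2)
inds = perm[:g_fmo_train_steps // 2]
inds, _ = inds.sort()
inds = torch.cat((inds, (g_fmo_steps - 1) - torch.flip(inds, [0])), 0)

times = torch.linspace(0, 1, g_fmo_steps)[inds]
# Each sampled time pairs with its mirror: t + (1 - t) == 1
print(times, times + torch.flip(times, [0]))  # second tensor is all ones
```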
#### File: DeFMO/helpers/torch_helpers.py
```python
import torch
from torch.nn import functional as F
from torchvision.utils import save_image
from skimage.measure import label, regionprops
import os
import cv2
import numpy as np
from main_settings import *
import matplotlib.pyplot as plt
from PIL import Image
import pdb
def renders2traj(renders,device):
masks = renders[:,:,-1]
sumx = torch.sum(masks,-2)
sumy = torch.sum(masks,-1)
cenx = torch.sum(sumy*torch.arange(1,sumy.shape[-1]+1)[None,None].float().to(device),-1) / torch.sum(sumy,-1)
ceny = torch.sum(sumx*torch.arange(1,sumx.shape[-1]+1)[None,None].float().to(device),-1) / torch.sum(sumx,-1)
est_traj = torch.cat((cenx.unsqueeze(-1),ceny.unsqueeze(-1)),-1)
return est_traj
def renders2traj_bbox(renders_rgba):
masks = renders_rgba[:,:,-1]
est_traj = []
for ti in range(masks.shape[2]):
th = np.min([0.1, 0.5*np.max(masks[:,:,ti])])
dI = (masks[:,:,ti] >= th).astype(float)
labeled = label(dI)
regions = regionprops(labeled)
areas = [reg.area for reg in regions]
region = regions[np.argmax(areas)]
bbox = np.array(region.bbox)
est_traj = np.r_[est_traj, bbox[:2] + (bbox[2:]-bbox[:2])/2]
est_traj = np.reshape(est_traj, (-1,2)).T
return est_traj
def write_latent(rendering, latent, device, folder=g_temp_folder,steps=g_fmo_steps,videoname='output.avi'):
write_video = True
write_images = False
eps = 0
out = None
with torch.no_grad():
times = torch.linspace(0+eps,1-eps,steps).to(device)
renders = rendering(latent,times[None])
for ki in range(renders.shape[1]):
ti = times[ki]
if write_images:
save_image(renders[0,ki].clone(), os.path.join(folder, 'latent{:04d}.png'.format(int(ti*100))))
if write_video:
if out is None:
out = cv2.VideoWriter(os.path.join(folder, videoname),cv2.VideoWriter_fourcc(*"MJPG"), 6, (renders.shape[4], renders.shape[3]),True)
im4 = renders[0,ki].data.cpu().detach().numpy().transpose(1,2,0)
im = im4[:,:,[2,1,0]] * im4[:,:,3:] + 1* (1 - im4[:,:,3:])
out.write( (im * 255).astype(np.uint8) )
if write_video:
out.release()
return renders
def write_gt(gt_paths, folder=g_temp_folder, bgr_clr = 1):
write_video = True
out = None
renders = []
for ti in range(len(gt_paths)):
im4 = np.array(Image.open(gt_paths[ti]))/255
renders.append(im4[np.newaxis].copy())
if out is None:
out = cv2.VideoWriter(os.path.join(folder, 'output_gt.avi'),cv2.VideoWriter_fourcc(*"MJPG"), 6, (im4.shape[1], im4.shape[0]),True)
im = im4[:,:,[2,1,0]] * im4[:,:,3:] + bgr_clr* (1 - im4[:,:,3:])
out.write( (im.copy() * 255).astype(np.uint8) )
out.release()
renders = np.stack(renders,1)
renders = torch.from_numpy(renders).float().permute(0,1,4,2,3)
return renders
def write_gt_masks(gt_paths, folder=g_temp_folder, bgr_clr = 1):
write_video = True
out = None
renders = []
for ti in range(len(gt_paths)):
im4 = np.array(Image.open(gt_paths[ti]))/255
renders.append(im4[np.newaxis].copy())
if out is None:
out = cv2.VideoWriter(os.path.join(folder, 'output_masks_gt.avi'),cv2.VideoWriter_fourcc(*"MJPG"), 6, (im4.shape[1], im4.shape[0]),True)
im = (im4[:,:,[3,3,3]])
if bgr_clr == 1:
im = 1 - im
out.write( (im.copy() * 255).astype(np.uint8) )
out.release()
renders = np.stack(renders,1)
renders = torch.from_numpy(renders).float().permute(0,1,4,2,3)
return renders
def get_figure(encoder, rendering, device, val_batch):
latent = encoder(val_batch)
times = [0, 1]
fig = plt.figure() # figsize=(12, 48)
nidx = len(times)
for idx in np.arange(nidx):
t_tensor = torch.FloatTensor([times[idx]]).to(device).repeat(latent.shape[0], 1, latent.shape[2], latent.shape[3])
result = rendering(torch.cat((t_tensor,latent),1)).cpu().numpy()
ax = fig.add_subplot(1, nidx, idx+1, xticks=[], yticks=[])
plt.imshow(np.transpose(result[0], (1, 2, 0)))
ax.set_title("t = {}".format(times[idx]))
return fig
def get_images(encoder, rendering, device, val_batch):
with torch.no_grad():
latent = encoder(val_batch)
times = torch.linspace(0,1,2).to(device)
renders = rendering(latent,times[None])
renders = renders.cpu().numpy()
renders = renders[:,:,3:4]*(renders[:,:,:3]-1)+1
return renders
def normalized_cross_correlation_channels(image1, image2):
mean1 = image1.mean([2,3,4],keepdims=True)
mean2 = image2.mean([2,3,4],keepdims=True)
std1 = image1.std([2,3,4],unbiased=False,keepdims=True)
std2 = image2.std([2,3,4],unbiased=False,keepdims=True)
eps=1e-8
bs, ts, *sh = image1.shape
N = sh[0]*sh[1]*sh[2]
im1b = ((image1-mean1)/(std1*N+eps)).view(bs*ts, sh[0], sh[1], sh[2])
im2b = ((image2-mean2)/(std2+eps)).reshape(bs*ts, sh[0], sh[1], sh[2])
padding = (0,) + tuple(side // 10 for side in sh[1:])
result = F.conv3d(im1b[None], im2b[:,None], padding=padding, bias=None, groups=bs*ts)
ncc = result.view(bs*ts, -1).max(1)[0].view(bs, ts)
return ncc
def normalized_cross_correlation(image1, image2):
mean1 = image1.mean([2,3],keepdims=True)
mean2 = image2.mean([2,3],keepdims=True)
std1 = image1.std([2,3],unbiased=False,keepdims=True)
std2 = image2.std([2,3],unbiased=False,keepdims=True)
eps=1e-8
bs, ts, *sh = image1.shape
N = sh[0]*sh[1]
im1b = ((image1-mean1)/(std1*N+eps)).view(bs*ts, sh[0], sh[1])
im2b = ((image2-mean2)/(std2+eps)).reshape(bs*ts, sh[0], sh[1])
padding = tuple(side // 10 for side in sh)
result = F.conv2d(im1b[None], im2b[:,None], padding=padding, bias=None, groups=bs*ts)
ncc = result.view(bs*ts, -1).max(1)[0].view(bs, ts)
return ncc
```
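The two correlation helpers at the end of this file compute a per-frame normalized cross-correlation by running each frame as its own convolution group. A quick sanity check is to correlate a tensor with itself, where the peak value should be close to 1. This is a minimal sketch, not code from the repository; it assumes `normalized_cross_correlation` from the module above is in scope (only `torch` is actually required by the function).
```python
import torch

# hypothetical smoke test: a batch of 1 sequence with 4 single-channel 64x64 frames
a = torch.rand(1, 4, 64, 64)  # (batch, time, H, W)
ncc = normalized_cross_correlation(a, a)
print(ncc.shape)  # torch.Size([1, 4])
print(ncc)        # the peak NCC of an image with itself is ~1.0
```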
#### File: DeFMO/renderer/render_fmo.py
```python
import sys
import os
import random
import pickle
import bpy
import glob
import numpy as np
from mathutils import Vector
from mathutils import Euler
import cv2
from PIL import Image
from skimage.draw import line_aa
from scipy import signal
from skimage.measure import regionprops
# import moviepy.editor as mpy
from array2gif import write_gif
abs_path = os.path.abspath(__file__)
sys.path.append(os.path.dirname(abs_path))
from render_helper import *
from settings import *
import settings
import pdb
def renderTraj(pars, H):
## Input: pars is either 2x2 (line) or 2x3 (parabola)
if pars.shape[1] == 2:
pars = np.concatenate( (pars, np.zeros((2,1))),1)
ns = 2
else:
ns = 5
ns = np.max([2, ns])
rangeint = np.linspace(0,1,ns)
for timeinst in range(rangeint.shape[0]-1):
ti0 = rangeint[timeinst]
ti1 = rangeint[timeinst+1]
start = pars[:,0] + pars[:,1]*ti0 + pars[:,2]*(ti0*ti0)
end = pars[:,0] + pars[:,1]*ti1 + pars[:,2]*(ti1*ti1)
start = np.round(start).astype(np.int32)
end = np.round(end).astype(np.int32)
rr, cc, val = line_aa(start[0], start[1], end[0], end[1])
valid = np.logical_and(np.logical_and(rr < H.shape[0], cc < H.shape[1]), np.logical_and(rr > 0, cc > 0))
rr = rr[valid]
cc = cc[valid]
val = val[valid]
if len(H.shape) > 2:
H[rr, cc, 0] = 0
H[rr, cc, 1] = 0
H[rr, cc, 2] = val
else:
H[rr, cc] = val
return H
def open_log(temp_folder = g_temp): # redirect output to log file
logfile = os.path.join(temp_folder,'blender_render.log')
try:
os.remove(logfile)
except OSError:
pass
open(logfile, 'a').close()
old = os.dup(1)
sys.stdout.flush()
os.close(1)
os.open(logfile, os.O_WRONLY)
return old
def close_log(old): # disable output redirection
os.close(1)
os.dup(old)
os.close(old)
def clear_mesh():
""" clear all meshes in the secene
"""
bpy.ops.object.select_all(action='DESELECT')
for obj in bpy.data.objects:
if obj.type == 'MESH':
obj.select = True
bpy.ops.object.delete()
for block in bpy.data.meshes:
if block.users == 0:
bpy.data.meshes.remove(block)
for block in bpy.data.materials:
if block.users == 0:
bpy.data.materials.remove(block)
for block in bpy.data.textures:
if block.users == 0:
bpy.data.textures.remove(block)
for block in bpy.data.images:
if block.users == 0:
bpy.data.images.remove(block)
def scene_setting_init(use_gpu):
"""initialize blender setting configurations
"""
sce = bpy.context.scene.name
bpy.data.scenes[sce].render.engine = g_engine_type
bpy.data.scenes[sce].cycles.film_transparent = g_use_film_transparent
#output
bpy.data.scenes[sce].render.image_settings.color_mode = g_rgb_color_mode
bpy.data.scenes[sce].render.image_settings.color_depth = g_rgb_color_depth
bpy.data.scenes[sce].render.image_settings.file_format = g_rgb_file_format
bpy.data.scenes[sce].render.use_overwrite = g_depth_use_overwrite
bpy.data.scenes[sce].render.use_file_extension = g_depth_use_file_extension
if g_ambient_light:
world = bpy.data.worlds['World']
world.use_nodes = True
bg = world.node_tree.nodes['Background']
bg.inputs[0].default_value[:3] = g_bg_color
bg.inputs[1].default_value = 1.0
#dimensions
bpy.data.scenes[sce].render.resolution_x = g_resolution_x
bpy.data.scenes[sce].render.resolution_y = g_resolution_y
bpy.data.scenes[sce].render.resolution_percentage = g_resolution_percentage
if use_gpu:
bpy.data.scenes[sce].render.engine = 'CYCLES' #only cycles engine can use gpu
        bpy.data.scenes[sce].render.tile_x = g_hilbert_spiral
        bpy.data.scenes[sce].render.tile_y = g_hilbert_spiral
bpy.context.user_preferences.addons['cycles'].preferences.devices[0].use = False
bpy.context.user_preferences.addons['cycles'].preferences.devices[1].use = True
ndev = len(bpy.context.user_preferences.addons['cycles'].preferences.devices)
print('Number of devices {}'.format(ndev))
for ki in range(2,ndev):
bpy.context.user_preferences.addons['cycles'].preferences.devices[ki].use = False
bpy.context.user_preferences.addons['cycles'].preferences.compute_device_type = 'CUDA'
# bpy.types.CyclesRenderSettings.device = 'GPU'
bpy.data.scenes[sce].cycles.device = 'GPU'
def node_setting_init():
bpy.context.scene.use_nodes = True
tree = bpy.context.scene.node_tree
links = tree.links
for node in tree.nodes:
tree.nodes.remove(node)
render_layer_node = tree.nodes.new('CompositorNodeRLayers')
image_output_node = tree.nodes.new('CompositorNodeOutputFile')
image_output_node.base_path = g_syn_rgb_folder
links.new(render_layer_node.outputs[0], image_output_node.inputs[0])
# image_output_node = bpy.context.scene.node_tree.nodes[1]
image_output_node.base_path = g_temp
image_output_node.file_slots[0].path = 'image-######.png' # blender placeholder #
def render(obj_path, viewpoint, temp_folder):
"""render rbg image
render a object rgb image by a given camera viewpoint and
choose random image as background, only render one image
at a time.
Args:
obj_path: a string variable indicate the obj file path
viewpoint: a vp parameter(contains azimuth,elevation,tilt angles and distance)
"""
vp = viewpoint
cam_location = camera_location(vp.azimuth, vp.elevation, vp.distance)
cam_rot = camera_rot_XYZEuler(vp.azimuth, vp.elevation, vp.tilt)
cam_obj = bpy.data.objects['Camera']
cam_obj.location[0] = cam_location[0]
cam_obj.location[1] = cam_location[1]
cam_obj.location[2] = cam_location[2]
cam_obj.rotation_euler[0] = cam_rot[0]
cam_obj.rotation_euler[1] = cam_rot[1]
cam_obj.rotation_euler[2] = cam_rot[2]
if not os.path.exists(g_syn_rgb_folder):
os.mkdir(g_syn_rgb_folder)
obj = bpy.data.objects['model_normalized']
ni = g_fmo_steps
maxlen = 0.5
maxrot = 1.57/6
tri = 0
# rot_base = np.array([math.pi/2,0,0])
while tri <= g_max_trials:
do_repeat = False
tri += 1
if not g_apply_texture:
for oi in range(len(bpy.data.objects)):
if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
continue
for tempi in range(len(bpy.data.objects[oi].data.materials)):
if bpy.data.objects[oi].data.materials[tempi].alpha != 1.0:
return True, True ## transparent object
        loc_start = Vector((random.uniform(-maxlen/10, maxlen/10), random.uniform(-maxlen, maxlen), random.uniform(-maxlen, maxlen)))
loc_step = Vector((random.uniform(-maxlen/10, maxlen/10), random.uniform(-maxlen, maxlen), random.uniform(-maxlen, maxlen)))/ni
rot_base = np.array((random.uniform(0, 2*math.pi), random.uniform(0, 2*math.pi), random.uniform(0, 2*math.pi)))
rot_step = np.array((random.uniform(-maxrot, maxrot), random.uniform(-maxrot, maxrot), random.uniform(-maxrot, maxrot)))/ni
old = open_log(temp_folder)
for ki in [0, ni-1]+list(range(1,ni-1)):
for oi in range(len(bpy.data.objects)):
if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
continue
                bpy.data.objects[oi].location = loc_start + loc_step*ki
bpy.data.objects[oi].rotation_euler = Euler(rot_base + (rot_step*ki))
bpy.context.scene.frame_set(ki + 1)
bpy.ops.render.render(write_still=True) #start rendering
if ki == 0 or ki == (ni-1):
Mt = cv2.imread(os.path.join(bpy.context.scene.node_tree.nodes[1].base_path,'image-{:06d}.png'.format(ki+1)),cv2.IMREAD_UNCHANGED)[:,:,-1] > 0
is_border = ((Mt[0,:].sum()+Mt[-1,:].sum()+Mt[:,0].sum()+Mt[:,-1].sum()) > 0) or Mt.sum()==0
if is_border:
if ki == 0:
close_log(old)
return False, True ## sample different starting viewpoint
else:
do_repeat = True ## just sample another motion direction
if do_repeat:
break
close_log(old)
        if not do_repeat:
break
if do_repeat: ## sample different starting viewpoint
return False, True
return False, False
def make_fmo(path, gt_path, video_path):
n_im = 5
background_images = os.listdir(g_background_image_path)
seq_name = random.choice(background_images)
seq_images = glob.glob(os.path.join(g_background_image_path,seq_name,"*.jpg"))
if len(seq_images) <= n_im:
seq_images = glob.glob(os.path.join(g_background_image_path,seq_name,"*.png"))
seq_images.sort()
bgri = random.randint(n_im,len(seq_images)-1)
bgr_path = seq_images[bgri]
B0 = cv2.imread(bgr_path)/255
B = cv2.resize(B0, dsize=(int(g_resolution_x*g_resolution_percentage/100), int(g_resolution_y*g_resolution_percentage/100)), interpolation=cv2.INTER_CUBIC)
B[B > 1] = 1
B[B < 0] = 0
FH = np.zeros(B.shape)
MH = np.zeros(B.shape[:2])
pars = np.array([[(B.shape[0]-1)/2-1, (B.shape[1]-1)/2-1], [1.0, 1.0]]).T
FM = np.zeros(B.shape[:2]+(4,g_fmo_steps,))
centroids = np.zeros((2,g_fmo_steps))
for ki in range(g_fmo_steps):
FM[:,:,:,ki] = cv2.imread(os.path.join(gt_path,'image-{:06d}.png'.format(ki+1)),cv2.IMREAD_UNCHANGED)/g_rgb_color_max
props = regionprops((FM[:,:,-1,ki]>0).astype(int))
if len(props) != 1:
return False
centroids[:,ki] = props[0].centroid
for ki in range(g_fmo_steps):
F = FM[:,:,:-1,ki]*FM[:,:,-1:,ki]
M = FM[:,:,-1,ki]
if ki < g_fmo_steps-1:
pars[:,1] = centroids[:,ki+1] - centroids[:,ki]
H = renderTraj(pars, np.zeros(B.shape[:2]))
H /= H.sum()*g_fmo_steps
for kk in range(3):
FH[:,:,kk] += signal.fftconvolve(H, F[:,:,kk], mode='same')
MH += signal.fftconvolve(H, M, mode='same')
Im = FH + (1 - MH)[:,:,np.newaxis]*B
Im[Im > 1] = 1
Im[Im < 0] = 0
if g_skip_low_contrast:
Diff = np.sum(np.abs(Im - B),2)
meanval = np.mean(Diff[MH > 0.05])
print("Contrast {}".format(meanval))
if meanval < 0.2:
return False
if g_skip_small:
sizeper = np.sum(MH > 0.01)/(MH.shape[0]*MH.shape[1])
print("Size percentage {}".format(sizeper))
if sizeper < 0.05:
return False
Im = Im[:,:,[2,1,0]]
Ims = Image.fromarray((Im * 255).astype(np.uint8))
Ims.save(path)
Ball = np.zeros(B.shape+(n_im,))
Ball[:,:,:,0] = B
for ki in range(1,n_im):
bgrki_path = seq_images[bgri-ki]
Ball[:,:,:,ki] = cv2.resize(cv2.imread(bgrki_path)/255, dsize=(int(g_resolution_x*g_resolution_percentage/100), int(g_resolution_y*g_resolution_percentage/100)), interpolation=cv2.INTER_CUBIC)
Ball[Ball > 1] = 1
Ball[Ball < 0] = 0
Bmed = np.median(Ball,3)
Image.fromarray((B[:,:,[2,1,0]] * 255).astype(np.uint8)).save(os.path.join(gt_path,'bgr.png'))
Image.fromarray((Bmed[:,:,[2,1,0]] * 255).astype(np.uint8)).save(os.path.join(gt_path,'bgr_med.png'))
# Ims.save(os.path.join(g_temp,"I.png"))
# Image.fromarray((FH * 255)[:,:,[2,1,0]].astype(np.uint8)).save(os.path.join(g_temp,"FH.png"))
# Image.fromarray((MH * 255).astype(np.uint8)).save(os.path.join(g_temp,"MH.png"))
# Image.fromarray((M * 255).astype(np.uint8)).save(os.path.join(g_temp,"M.png"))
# Image.fromarray((F * 255)[:,:,[2,1,0]].astype(np.uint8)).save(os.path.join(g_temp,"F.png"))
# Image.fromarray((B0 * 255)[:,:,[2,1,0]].astype(np.uint8)).save(os.path.join(g_temp,"B.png"))
if False:
Fwr = FM[:,:,:-1,:] * FM[:,:,-1:,:] + 1 * (1 - FM[:,:,-1:,:])
Fwr = (Fwr * 255).astype(np.uint8)
# Fwr[np.repeat(FM[:,:,-1:,:]==0,3,2)]=255
out = cv2.VideoWriter(video_path,cv2.VideoWriter_fourcc(*"MJPG"), 6, (F.shape[1],F.shape[0]),True)
for ki in range(g_fmo_steps):
out.write(Fwr[:,:,:,ki])
out.release()
return True
def render_obj(obj_path, path, objid, obj_name, temp_folder):
""" render one obj file by a given viewpoint list
a wrapper function for render()
Args:
        obj_path: a string variable indicating the obj file path
"""
vps_path = random.sample(g_view_point_file, 1)[0]
vps = list(load_viewpoint(vps_path))
random.shuffle(vps)
save_path = os.path.join(path,"{}_{:04d}.png".format(obj_name,objid))
gt_path = os.path.join(path,"GT","{}_{:04d}".format(obj_name,objid))
video_path = os.path.join(path,"{}_{:04d}.avi".format(obj_name,objid))
if not os.path.exists(gt_path):
os.mkdir(gt_path)
image_output_node = bpy.context.scene.node_tree.nodes[1]
image_output_node.base_path = gt_path
for imt in bpy.data.images:
bpy.data.images.remove(imt)
if g_apply_texture:
for oi in range(len(bpy.data.objects)):
if bpy.data.objects[oi].type == 'CAMERA' or bpy.data.objects[oi].type == 'LAMP':
continue
bpy.context.scene.objects.active = bpy.data.objects[oi]
# pdb.set_trace()
# for m in bpy.data.materials:
# bpy.data.materials.remove(m)
# bpy.ops.object.material_slot_remove()
bpy.ops.object.editmode_toggle()
bpy.ops.uv.cube_project()
bpy.ops.object.editmode_toggle()
texture_images = os.listdir(g_texture_path)
texture = random.choice(texture_images)
tex_path = os.path.join(g_texture_path,texture)
# mat = bpy.data.materials.new(texture)
# mat.use_nodes = True
# nt = mat.node_tree
# nodes = nt.nodes
# links = nt.links
# # Image Texture
# textureNode = nodes.new("ShaderNodeTexImage")
# textureNode.image = bpy.data.images.load(tex_path)
# links.new(nodes['Diffuse BSDF'].inputs['Color'], textureNode.outputs['Color'])
# mat.specular_intensity = 0
# bpy.data.objects[oi].active_material = mat
# print(bpy.data.objects[oi].active_material)
for mat in bpy.data.materials:
nodes = mat.node_tree.nodes
links = mat.node_tree.links
textureNode = nodes.new("ShaderNodeTexImage")
textureNode.image = bpy.data.images.load(tex_path)
links.new(nodes['Diffuse BSDF'].inputs['Color'], textureNode.outputs['Color'])
# print(bpy.data.objects[oi].active_material)
tri = 0
while tri <= g_max_trials:
tri += 1
vp = random.sample(vps, 1)[0]
sample_different_object, sample_different_vp = render(obj_path, vp, temp_folder)
if sample_different_vp:
if sample_different_object:
print('Transparent object!')
return False
print('Rendering failed, repeating')
continue
success = make_fmo(save_path, gt_path, video_path)
if success:
return True
print('Making FMO failed, repeating')
return False
def init_all():
"""init everything we need for rendering
an image
"""
scene_setting_init(g_gpu_render_enable)
node_setting_init()
cam_obj = bpy.data.objects['Camera']
cam_obj.rotation_mode = g_rotation_mode
if g_render_light:
bpy.data.objects['Lamp'].data.energy = 50
bpy.ops.object.lamp_add(type='SUN')
bpy.data.objects['Sun'].data.energy = 5
### YOU CAN WRITE YOUR OWN IMPLEMENTATION TO GENERATE DATA
init_all()
argv = sys.argv
argv = argv[argv.index("--") + 1:]
start_index = int(argv[0])
step_index = int(argv[1])
print('Start index {}, step index {}'.format(start_index, step_index))
temp_folder = g_syn_rgb_folder+g_render_objs[start_index]+'/'
for obj_name in g_render_objs[start_index:(start_index+step_index)]:
print("Processing object {}".format(obj_name))
obj_folder = os.path.join(g_syn_rgb_folder, obj_name)
if not os.path.exists(obj_folder):
os.makedirs(obj_folder)
if not os.path.exists(os.path.join(obj_folder,"GT")):
os.mkdir(os.path.join(obj_folder,"GT"))
num = g_shapenet_categlory_pair[obj_name]
search_path = os.path.join(g_shapenet_path, num, '**','*.obj')
pathes = glob.glob(search_path, recursive=True)
random.shuffle(pathes)
objid = 1
tri = 0
while objid <= g_number_per_category:
print(" instance {}".format(objid))
clear_mesh()
path = random.sample(pathes, 1)[0]
old = open_log(temp_folder)
bpy.ops.import_scene.obj(filepath=path, axis_forward='-Z', axis_up='Y', filter_glob="*.obj;*.mtl", use_split_groups=False, use_split_objects=True)
# bpy.ops.import_scene.obj(filepath=path)
close_log(old)
#combine_objects()
#scale_objects(0.5)
result = render_obj(path, obj_folder, objid, obj_name, temp_folder)
if result:
objid += 1
tri = 0
else:
print('Error! Rendering another object from the category!')
tri += 1
if tri > g_max_trials:
            print('No object found in the category!')
break
```
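renderTraj above rasterizes a linear (2x2 parameters) or parabolic (2x3 parameters) trajectory into an image using anti-aliased line segments; make_fmo then normalizes that image into the motion-blur kernel H. Below is a hedged stand-alone sketch of calling it; the coefficients are invented for illustration, and since the module itself imports bpy, running this outside Blender would require copying the function out first.
```python
import numpy as np

# draw a parabolic trajectory into an empty 100x100 canvas
H = np.zeros((100, 100))
pars = np.array([[10.0, 80.0, -40.0],   # row(t) = 10 + 80*t - 40*t**2
                 [10.0, 60.0,   0.0]])  # col(t) = 10 + 60*t
H = renderTraj(pars, H)
print(H.sum() > 0)  # True: the trajectory pixels now hold anti-aliased weights
```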
#### File: DeFMO/renderer/render_helper.py
```python
import os
import glob
import math
import random
from collections import namedtuple
from settings import *
# these need to be defined outside the function, otherwise pickle cannot find
# where VP was defined
VP = namedtuple('VP',['azimuth', 'elevation', 'tilt', 'distance'])
Model = namedtuple('Model', ['path', 'vps'])
def load_viewpoint(viewpoint_file):
"""read viewpoints from a file, can only read one file at once
Args:
viewpoint_file: file path to viewpoint file, read only one file
for each function call
Returns:
generator of viewpoint parameters(contains azimuth,elevation,tilt angles and distance)
"""
with open(viewpoint_file) as viewpoints:
for line in viewpoints.readlines():
yield VP(*line.strip().split())
def load_viewpoints(viewpoint_file_list):
"""load multiple viewpoints file from given lists
Args:
viewpoint_file_list: a list contains obj path
a wrapper for load_viewpoint function
Returns:
return a generator contains multiple generators
which contains obj pathes
"""
    if isinstance(viewpoint_file_list, str):
        vp_file_list = [viewpoint_file_list]
    else:
        try:
            vp_file_list = iter(viewpoint_file_list)
        except TypeError:
            print("viewpoint_file_list is not an iterable object")
            return
for vp_file in vp_file_list:
yield load_viewpoint(vp_file)
def load_object_lists(category=None):
"""
load object pathes according to the given category
Args:
category:a iterable object contains the category which
we want render
Returns:
generator of gnerators of obj file pathes
"""
#type checking
if not category:
category = g_render_objs
elif isinstance(category, str):
category = [category]
    else:
        try:
            iter(category)
        except TypeError:
            print("category should be an iterable object")
            return
#load obj file path
for cat in category:
num = g_shapenet_categlory_pair[cat]
search_path = os.path.join(g_shapenet_path, num, '**','*.obj')
yield glob.iglob(search_path, recursive=True)
def camera_location(azimuth, elevation, dist):
"""get camera_location (x, y, z)
you can write your own version of camera_location function
to return the camera loation in the blender world coordinates
system
Args:
azimuth: azimuth degree(object centered)
elevation: elevation degree(object centered)
dist: distance between camera and object(in meter)
Returens:
return the camera location in world coordinates in meters
"""
#convert azimuth, elevation degree to radians
phi = float(elevation) * math.pi / 180
theta = float(azimuth) * math.pi / 180
dist = float(dist)
x = dist * math.cos(phi) * math.cos(theta)
y = dist * math.cos(phi) * math.sin(theta)
z = dist * math.sin(phi)
return x, y, z
def camera_rot_XYZEuler(azimuth, elevation, tilt):
"""get camera rotaion in XYZEuler
Args:
azimuth: azimuth degree(object centerd)
elevation: elevation degree(object centerd)
tilt: twist degree(object centerd)
Returns:
return the camera rotation in Euler angles(XYZ ordered) in radians
"""
azimuth, elevation, tilt = float(azimuth), float(elevation), float(tilt)
x, y, z = 90, 0, 90 #set camera at x axis facing towards object
#twist
#if tilt > 0:
# y = tilt
#else:
# y = 360 + tilt
#latitude
x = x - elevation
    #longitude
z = z + azimuth
return x * math.pi / 180, y * math.pi / 180, z * math.pi / 180
def random_sample_objs(num_per_cat):
"""randomly sample object file from ShapeNet for each
category in global variable g_render_objs, and then
save the result in global variable g_obj_path
Args:
num_per_cat: how many obj file we want to sample per
category
Returns:
vps: a dictionary contains category name and its corresponding
obj file path
"""
obj_path_lists = load_object_lists(g_render_objs)
obj_path_dict = {}
for cat, pathes in zip(g_render_objs, obj_path_lists):
pathes = list(pathes)
random.shuffle(pathes)
samples = random.sample(pathes, num_per_cat)
obj_path_dict[cat] = samples
return obj_path_dict
def random_sample_vps(obj_path_dict, num_per_model):
"""randomly sample vps from vp lists, for each model,
we sample num_per_cat number vps, and save the result to
g_vps
Args:
obj_pathes: result of function random_sample_objs,
contains obj file pathes
num_per_cat: how many view point to sample per model
Returns:
result_dict: a dictionary contains model name and its corresponding
viewpoints
"""
if type(g_view_point_file) == set:
vp_file_lists = [name for name in g_view_point_file]
else:
vp_file_lists = [g_view_point_file[name] for name in g_render_objs]
viewpoint_lists = load_viewpoints(vp_file_lists)
obj_file_pathes = [obj_path_dict[name] for name in g_render_objs]
result_dict = {}
for cat, pathes, vps in zip(g_render_objs, obj_file_pathes, viewpoint_lists):
vps = list(vps)
random.shuffle(vps)
models = []
for p in pathes:
samples = random.sample(vps, num_per_model)
models.append(Model(p, samples))
result_dict[cat] = models
return result_dict
def random_sample_objs_and_vps(model_num_per_cat, vp_num_per_model):
"""wrapper function for randomly sample model and viewpoints
and return the result, each category in g_render_objs contains
multiple Model object, each Model object has path and vps attribute
path attribute indicates where the obj file is and vps contains
viewpoints to render the obj file
Args:
model_num_per_cat: how many models you want to sample per category
vp_num_per_model: how many viewpoints you want to sample per model
Returns:
return a dict contains Model objects
"""
obj_path_dict = random_sample_objs(model_num_per_cat)
result_dict = random_sample_vps(obj_path_dict, vp_num_per_model)
return result_dict
``` |
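Since camera_location and camera_rot_XYZEuler implement a plain spherical-to-Cartesian conversion plus a fixed Euler-angle offset, they are easy to spot-check. A small hedged example (not from the repository; it assumes the two functions above are in scope, as the module pulls in the repo's settings on import):
```python
# azimuth 0, elevation 0, distance 2 -> camera on the +x axis
print(camera_location(0, 0, 2))  # (2.0, 0.0, 0.0)

# zero tilt keeps y at 0; x and z start from 90 degrees
print([round(v, 4) for v in camera_rot_XYZEuler(0, 0, 0)])  # [1.5708, 0.0, 1.5708]
```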
{
"source": "1259975740/Img2Txt",
"score": 3
} |
#### File: Img2Txt/Fun/pdf2txt.py
```python
import argparse
import logging
import sys
import pdfminer3.settings
pdfminer3.settings.STRICT = False
import pdfminer3.high_level
import pdfminer3.layout
from pdfminer3.image import ImageWriter
import tkinter as tk
from tkinter import messagebox as mBox
def extract_text(files=[], outfile='-',
_py2_no_more_posargs=None,
no_laparams=False, all_texts=None, detect_vertical=None, # LAParams
word_margin=None, char_margin=None, line_margin=None, boxes_flow=None, # LAParams
output_type='text', codec='utf-8', strip_control=False,
maxpages=0, page_numbers=None, password="", scale=1.0, rotation=0,
layoutmode='normal', output_dir=None, debug=False,
disable_caching=False, **other):
if _py2_no_more_posargs is not None:
raise ValueError("Too many positional arguments passed.")
if not files:
raise ValueError("Must provide files to work upon!")
if not no_laparams:
laparams = pdfminer3.layout.LAParams()
for param in ("all_texts", "detect_vertical", "word_margin", "char_margin", "line_margin", "boxes_flow"):
paramv = locals().get(param, None)
if paramv is not None:
setattr(laparams, param, paramv)
else:
laparams = None
imagewriter = None
if output_dir:
imagewriter = ImageWriter(output_dir)
if output_type == "text" and outfile != "-":
for override, alttype in ( (".htm", "html"),
(".html", "html"),
(".xml", "xml"),
(".tag", "tag") ):
if outfile.endswith(override):
output_type = alttype
if outfile == "-":
outfp = sys.stdout
if outfp.encoding is not None:
codec = 'utf-8'
else:
outfp = open(outfile, "wb")
for fname in files:
with open(fname, "rb") as fp:
pdfminer3.high_level.extract_text_to_fp(fp, **locals())
return outfp
def trans(files, outfile):
    mBox.showinfo('Notice', 'Running, please be patient\nThe more complex the file, the longer it takes (feel free to close this dialog)')
outfp = extract_text(files=files,outfile=outfile)
outfp.close()
    mBox.showinfo('Notice', 'Finished')
if __name__ == '__main__':
trans(files=['E:/java-2020-03/eclipse/workspace/Img2Txt/Fun/test.pdf'],outfile='output.html')
```
#### File: Img2Txt/Fun/screenShot.py
```python
import tkinter as tk
from tkinter import filedialog as fd
import os
from os import path
from PIL import ImageGrab
from time import sleep
class ScreenShot:
def __init__(self,master,filename):
self._createWidge(master, filename)
    # the variables X and Y record where the left mouse button was pressed
def _onLeftButtonDown(self,event):
self._X.set(event.x)
self._Y.set(event.y)
        # start the region selection
self._sel = True
def _onLeftButtonMove(self,event):
if not self._sel:
return
try:
            # delete the previously drawn rectangle, otherwise mouse movement leaves a smear of dark rectangles
self._canvas.delete(self._lastDraw)
except Exception as e:
pass
self._lastDraw = self._canvas.create_rectangle(self._X.get(), self._Y.get(),
event.x, event.y, outline='black',
width = 5)
def _onLeftButtonUp(self,event):
self.fileName = os.path.join(os.path.dirname(__file__),'../Input')
self._sel = False
try:
self._canvas.delete(self._lastDraw)
except Exception as e:
pass
sleep(0.1)
        # also handle selections dragged from bottom-right to top-left
        leftSel, rightSel = sorted([self._X.get(), event.x])
        topSel, bottomSel = sorted([self._Y.get(), event.y])
        pic = ImageGrab.grab((leftSel+1,topSel,rightSel+1,bottomSel))
        # pop up the save-screenshot dialog
        fDir = os.path.join(os.path.dirname(__file__),'../Input')  # parent Input directory
        self.fileName = fd.asksaveasfilename(title='Save screenshot',
                                             filetypes=[('JPG files','*.jpg')],
                                             initialdir=fDir)
        # fDir is used as the dialog's default folder
if self.fileName:
pic.save(self.fileName)
pic.close()
        # close the current window
        #print(left, ' ', top,' ',right,' ',bottom)
self.top.destroy()
print(self.fileName)
sleep(1)
def _createWidge(self,master,filename):
self.fileName = os.path.abspath(os.path.join( os.path.dirname(__file__),".."))+'\Input'
self._X = tk.IntVar(0)
self._Y = tk.IntVar(0)
        # screen size
        screenWidth = master.winfo_screenwidth()
        #print(screenWidth)
        screenHeight = master.winfo_screenheight()
        #print(screenHeight)
        # create the top-level widget container
        self.top = tk.Toplevel(master, width=screenWidth, height=screenHeight)
        # hide the maximize/minimize buttons
        self.top.overrideredirect(True)
        self._canvas = tk.Canvas(self.top,bg='white', width=screenWidth, height=screenHeight)
        # show the full-screen screenshot and select a region on top of it
        self._img = tk.PhotoImage(file=filename)
        self._canvas.create_image(screenWidth//2, screenHeight//2,image=self._img)
        # position where the left mouse button is pressed
        self._canvas.bind('<Button-1>', self._onLeftButtonDown)
        # left-button drag: show the selected region
        self._canvas.bind('<B1-Motion>', self._onLeftButtonMove)
        # position where the left button is released; save the region screenshot
        self._canvas.bind('<ButtonRelease-1>', self._onLeftButtonUp)
self._canvas.pack(fill=tk.BOTH, expand=tk.YES)
def buttonCaptureClick(master):
    # minimize the main window
master.state('icon')
sleep(0.5)
filename = 'screenShot.png'
im = ImageGrab.grab()
im.save(filename)
im.close()
    # show the full-screen screenshot
w = ScreenShot(master,filename)
# master.printScrBut.wait_window(w.top)
    # screenshot done: restore the main window and delete the temporary full-screen capture
    #label.config(text='Hello')
os.remove(filename)
print(w.fileName)
return w.fileName
```
#### File: Img2Txt/Fun/summary.py
```python
from pyhanlp import *
from tkinter import messagebox as mBox
import tkinter as tk
def summary(text,num,Widget):
text_list = HanLP.extractSummary(text,num)
text = '\n'.join([t for t in text_list])
Widget.insert(tk.INSERT,text)
    mBox.showinfo('Notice', 'Finished')
if __name__ == "__main__":
summary_list = summary('“芦花滩上有扁舟, 俊杰黄昏独自游, 义到尽头原是命, 反躬逃难必无忧。”\
这是一首出自《水浒传》中吴用留下的藏头反诗;电视剧《裂变》中汉奸“蝙蝠”也曾\
使用数字对应书本页面和文字的方法传递消息给日寇;电影《暗算》中更是提到了黄依\
依解决多种密文的具体情节;甚至连动画片《名侦探柯南》也出现了 skytale 加密的细\
节。事实上,密文不仅存在于荧幕中,而且深入到生活的方方面面,例如用于存储互联\
网消息的 cookie、以及互联网安全中常提到的数字签名、在银行等网站上填写个人信息\
时,都会用一定的手段将明文加密成密文传输到在远处的服务器中,可以说,在互联网\
的世界里,只要有比特流动,就一定会有加密的存在。为此,各大高校还设立了专门的\
学科,如密码学、密码分析学、密码史等。不得不说,密码的发展更数学密切相关、大\
多数的密码学家都兼任数学家的身份,而密码学,这一学科在战争时代更是快速地发展。\
以下将会介绍密码学的发展史、以及一些经典密码学的典型加密方法和其对应的解密方\
法的介绍、文章最后会简单地提及现代密码学的一些实现手段',5)
print(summary_list)
```
#### File: Img2Txt/GUI/Tab3.py
```python
import tkinter as tk
from tkinter import ttk
import sys
sys.path.append('../')
from Fun.vert import verticalPrint
from tkinter import messagebox as mBox
from tkinter import scrolledtext
from threading import Thread
class Tab3():
def __init__(self,tab,i18n):
self._createWidget(tab,i18n)
def _createTransThread(self):
input_text = u''+str(self.inScrTxt.get(1.0,tk.END))
print(input_text)
width = int(self.widthEry.get())
run = Thread(target=verticalPrint,args=(input_text,width,self.outScrTxt))
        run.daemon = True
run.start()
def _trans(self):
self._createTransThread()
def _clean(self):
if mBox.askyesno("百变小T",
"确定清空输出框里的内容吗?"):
self.inScrTxt.delete(1.0,tk.END)
self.outScrTxt.delete(1.0,tk.END)
def _createWidget(self,tab,i18n):
self.zhuo = ttk.Frame(tab)
self.zhuo.grid(row=0,column=0)
self.widthFrm = ttk.Frame(self.zhuo)
self.widthFrm.grid(column=0,row=0,sticky='W',pady=6)
ttk.Label(self.widthFrm,text=i18n.width).grid(column=0,row=0)
self._width_default = tk.StringVar()
self._width_default.set(6)
self.widthEry = ttk.Entry(self.widthFrm,textvariable=self._width_default)
self.widthEry.grid(column=1,row=0)
        # convert button
self.butFrm = ttk.Frame(self.zhuo)
self.butFrm.grid(column=0,row=2,sticky='W',pady=6)
self.transBut = ttk.Button(self.butFrm,text=i18n.transBut,
command=self._trans)
self.transBut.grid(column=1,row=0)
self.cleanBut = ttk.Button(self.butFrm,text=i18n.cleanBut,
command=self._clean)
self.cleanBut.grid(column=2,row=0)
for child in self.butFrm.winfo_children():
child.grid_configure(padx=6,pady=6,sticky='W')
        # scrolled text display areas
self.printOptFrm = ttk.Frame(self.zhuo)
self.printOptFrm.grid(row=1,column=0,sticky='W')
ttk.Label(self.printOptFrm,text=i18n.input_tab3).grid(column=0,row=0)
self.inScrTxt = scrolledtext.ScrolledText(self.printOptFrm,height=10,width=70,wrap=tk.WORD)
self.inScrTxt.grid(row=1,column=0,sticky='W',padx=6,pady=6)
self.inScrTxt.insert(tk.INSERT,"凡是到达了的地方,都I want to die, but I still to my life 属于昨天。哪怕那山再青,那水再秀,那风再温柔。带深的流连便成了一种羁绊,\
绊住的不仅是双脚,还有未来。可我的钱不够笑哭")
ttk.Label(self.printOptFrm,text=i18n.output_tab3).grid(column=0,row=2)
self.outScrTxt = scrolledtext.ScrolledText(self.printOptFrm,height=10,width=70,wrap=tk.WORD)
self.outScrTxt.grid(row=3,column=0,sticky='W',padx=6,pady=6)
for child in self.printOptFrm.winfo_children():
child.grid_configure(padx=6,pady=6,sticky='W')
``` |
{
"source": "1259975740/Word2Paper",
"score": 3
} |
#### File: dist/Fun/fun1.py
```python
from PIL import Image, ImageFont
import numpy as np
from handright import Template, handwrite
from multiprocessing import Pool
import time
from Fun.fun2 import *
from tkinter import messagebox as mBox
def trans(input_path,output_path,font_path,line_spacing,font_size,
fill,left_margin,top_margin,right_margin,bottom_margin,word_spacing,
disturb_x_sigma,disturb_y_sigma,disturb_theta_sigma,
line_spacing_sigma,font_size_sigma,word_spacing_sigma,background,if_test=False):
    mBox.showinfo('Notice', 'Running, please be patient\nThe more complex the file, the longer it takes')
background=Image.open(background)
width, height = background.size
background = background.resize((np.int(3*width),np.int(2.5*height)),resample=Image.LANCZOS)
if not if_test:
text = read_docx(input_path)
else:
text = """
卿寻鲤影剔浮英,
我恨浮英掩玉卿。
难教芳心知我心,
孤烛半影又天明。
"""
time_start = time.time()
template = Template(
background=background,
font_size=font_size,
font=ImageFont.truetype(font_path),
line_spacing=line_spacing,
        fill=fill,  # font "color"
left_margin=left_margin,
top_margin=-top_margin,
right_margin=right_margin,
bottom_margin=bottom_margin,
word_spacing=word_spacing,
        line_spacing_sigma=line_spacing_sigma,  # random perturbation of line spacing
        font_size_sigma=font_size_sigma,  # random perturbation of font size
        word_spacing_sigma=word_spacing_sigma,  # random perturbation of word spacing
        end_chars=",。;、“”",  # characters that must not start a line after automatic wrapping
        perturb_x_sigma=disturb_x_sigma,  # random horizontal stroke offset
        perturb_y_sigma=disturb_y_sigma,  # random vertical stroke offset
        perturb_theta_sigma=disturb_theta_sigma,  # random stroke rotation offset
)
images = handwrite(text, template)
for i, im in enumerate(images):
assert isinstance(im, Image.Image)
if not if_test:
            im.save(output_path+"\\{}.jpg".format(i))
else:
im.save(r"../test.jpg")
time_end = time.time()
    print('This run took ' + str(time_end-time_start) + ' seconds')
    mBox.showinfo('Notice', 'Finished')
``` |
{
"source": "1260228859/greentor",
"score": 2
} |
#### File: greentor/greentor/green.py
```python
from __future__ import absolute_import
import sys
import socket
import time
import errno
import greenlet
from functools import wraps
from collections import deque
try:
    from StringIO import StringIO
except ImportError:
    from io import BytesIO as StringIO  # the read buffers below hold bytes
from tornado.ioloop import IOLoop
from tornado.concurrent import Future
from tornado.gen import coroutine, Return
from tornado.netutil import Resolver
from tornado.iostream import (IOStream as BaseIOStream, StreamClosedError,
_ERRNO_WOULDBLOCK)
IS_PYPY = False
try:
    import __pypy__
    __pypy__
    IS_PYPY = True
except ImportError:
    pass
def enable_debug():
if IS_PYPY:
sys.stderr.write("settrace api unsupported on pypy")
sys.stderr.flush()
return
import inspect
def trace_green(event, args):
src, target = args
if event == "switch":
print("from %s switch to %s" % (src, target))
elif event == "throw":
print("from %s throw exception to %s" % (src, target))
if src.gr_frame:
tracebacks = inspect.getouterframes(src.gr_frame)
buff = []
for traceback in tracebacks:
srcfile, lineno, func_name, codesample = traceback[1:-1]
trace_line = '''File "%s", line %d, in %s\n%s '''
buff.append(trace_line %
(srcfile, lineno, func_name, "".join(codesample)))
print("".join(buff))
greenlet.settrace(trace_green)
class GreenTask(greenlet.greenlet):
def __init__(self, run, *args, **kwargs):
super(GreenTask, self).__init__()
self._run = run
self._args = args
self._kwargs = kwargs
self._future = Future()
self._result = None
self._exc_info = ()
@property
def args(self):
return self._args
@property
def kwargs(self):
return self._kwargs
def run(self):
try:
timeout = self.kwargs.pop("timeout", 0)
if timeout:
timer = Timeout(timeout)
timer.start()
self._result = self._run(*self.args, **self.kwargs)
self._future.set_result(self._result)
except:
self._exc_info = sys.exc_info()
self._future.set_exc_info(self._exc_info)
finally:
if timeout:
timer.cancel()
def start(self):
self.switch()
def __str__(self):
func_name = "%s of %s " % (self._run.__name__, self._run.__module__)
return "<greenlet %s at %s>" % (func_name, hex(id(self)))
def __repr__(self):
return self.__str__()
def wait(self):
return self._future
    @classmethod
    def spawn(cls, *args, **kwargs):
        task = cls(*args, **kwargs)
        task.start()
        return task
def synclize(func):
coro = coroutine(func)
@wraps(func)
def _sync_call(*args, **kwargs):
child_gr = greenlet.getcurrent()
main = child_gr.parent
assert main, "only run in child greenlet"
def callback(future):
if future.exc_info():
child_gr.throw(*future.exc_info())
elif future.exception():
child_gr.throw(future.exception())
else:
child_gr.switch(future.result())
IOLoop.current().add_future(coro(*args, **kwargs), callback)
return main.switch()
return _sync_call
def spawn(callable_obj, *args, **kwargs):
return GreenTask.spawn(callable_obj, *args, **kwargs).wait()
def green(func):
@wraps(func)
def wrapper(*args, **kwargs):
return GreenTask.spawn(func, *args, **kwargs).wait()
return wrapper
class Waiter(object):
def __init__(self):
self._greenlet = greenlet.getcurrent()
self._main = self._greenlet.parent
@property
def greenlet(self):
return self._greenlet
def switch(self, value):
self._greenlet.switch(value)
def throw(self, *exc_info):
self._greenlet.throw(*exc_info)
def get(self):
return self._main.switch()
def clear(self):
pass
def sleep(seconds):
waiter = Waiter()
unique = object()
IOLoop.current().add_timeout(time.time() + seconds, waiter.switch, unique)
waiter.get()
class TimeoutException(Exception):
pass
class Timeout(object):
def __init__(self, deadline, ex=TimeoutException):
self._greenlet = greenlet.getcurrent()
self._ex = ex
self._callback = None
self._deadline = deadline
self._delta = time.time() + deadline
self._ioloop = IOLoop.current()
def start(self, callback=None):
errmsg = "%s timeout, deadline is %d seconds" % (str(self._greenlet),
self._deadline)
if callback:
self._callback = self._ioloop.add_timeout(self._delta, callback,
self._ex(errmsg))
else:
self._callback = self._ioloop.add_timeout(
self._delta, self._greenlet.throw, self._ex(errmsg))
def cancel(self):
assert self._callback, "Timeout not started"
self._ioloop.remove_timeout(self._callback)
self._greenlet = None
class IOStream(BaseIOStream):
def _handle_events(self, fd, events):
if self._closed:
return
try:
if self._connecting:
self._handle_connect()
if self._closed:
return
if events & self.io_loop.READ:
self._handle_read()
if self._closed:
return
if events & self.io_loop.WRITE:
self._handle_write()
if self._closed:
return
if events & self.io_loop.ERROR:
self.error = self.get_fd_error()
self.io_loop.add_callback(self.close)
return
except Exception:
self.close(exc_info=True)
raise
def _handle_connect(self):
super(IOStream, self)._handle_connect()
if not self.closed():
self._state = self.io_loop.ERROR | self.io_loop.READ
self.io_loop.update_handler(self.fileno(), self._state)
def _handle_read(self):
chunk = True
while True:
try:
chunk = self.socket.recv(self.read_chunk_size)
if not chunk:
break
self._read_buffer.append(chunk)
self._read_buffer_size += len(chunk)
except (socket.error, IOError, OSError) as e:
en = e.errno if hasattr(e, 'errno') else e.args[0]
if en in _ERRNO_WOULDBLOCK:
break
if en == errno.EINTR:
continue
self.close(exc_info=True)
return
if self._read_future is not None and self._read_buffer_size >= self._read_bytes:
future, self._read_future = self._read_future, None
data = b"".join(self._read_buffer)
self._read_buffer.clear()
self._read_buffer_size = 0
self._read_bytes = 0
future.set_result(data)
if not chunk:
self.close()
return
def read(self, num_bytes):
assert self._read_future is None, "Already reading"
if self._closed:
raise StreamClosedError(real_error=self.error)
future = self._read_future = Future()
self._read_bytes = num_bytes
self._read_partial = False
if self._read_buffer_size >= self._read_bytes:
future, self._read_future = self._read_future, None
data = b"".join(self._read_buffer)
self._read_buffer.clear()
self._read_buffer_size = 0
self._read_bytes = 0
future.set_result(data)
return future
read_bytes = read
def _handle_write(self):
while self._write_buffer:
try:
data = self._write_buffer.popleft()
num_bytes = self.socket.send(data)
self._write_buffer_size -= num_bytes
if num_bytes < len(data):
self._write_buffer.appendleft(data[num_bytes:])
return
except (socket.error, IOError, OSError) as e:
en = e.errno if hasattr(e, 'errno') else e.args[0]
if en in _ERRNO_WOULDBLOCK:
self._write_buffer.appendleft(data)
break
self.close(exc_info=True)
return
if not self._write_buffer:
if self._state & self.io_loop.WRITE:
self._state = self._state & ~self.io_loop.WRITE
self.io_loop.update_handler(self.fileno(), self._state)
def write(self, data):
assert isinstance(data, bytes)
if self._closed:
raise StreamClosedError(real_error=self.error)
if data:
self._write_buffer.append(data)
self._write_buffer_size += len(data)
if not self._connecting:
self._handle_write()
if self._write_buffer:
if not self._state & self.io_loop.WRITE:
self._state = self._state | self.io_loop.WRITE
self.io_loop.update_handler(self.fileno(), self._state)
class AsyncSocket(object):
def __init__(self, sock):
self._iostream = IOStream(sock)
self._resolver = Resolver()
self._readtimeout = 0
self._connecttimeout = 0
self._rbuffer = StringIO(b'')
self._rbuffer_size = 0
def set_readtimeout(self, timeout):
self._readtimeout = timeout
def set_connecttimeout(self, timeout):
self._connecttimeout = timeout
@synclize
def connect(self, address):
host, port = address
timer = None
try:
if self._connecttimeout:
timer = Timeout(self._connecttimeout)
timer.start()
resolved_addrs = yield self._resolver.resolve(
host,
port,
family=socket.AF_INET)
for addr in resolved_addrs:
family, host_port = addr
yield self._iostream.connect(host_port)
break
        except TimeoutException as e:
            self.close()
            raise socket.timeout(str(e))
finally:
if timer:
timer.cancel()
def sendall(self, buff):
self._iostream.write(buff)
def read(self, nbytes):
if nbytes <= self._rbuffer_size:
self._rbuffer_size -= nbytes
return self._rbuffer.read(nbytes)
if self._rbuffer_size > 0:
self._iostream._read_buffer.appendleft(self._rbuffer.read())
self._iostream._read_buffer_size += self._rbuffer_size
self._rbuffer_size = 0
if nbytes <= self._iostream._read_buffer_size:
data, data_len = b''.join(
self._iostream._read_buffer), self._iostream._read_buffer_size
self._iostream._read_buffer.clear()
self._iostream._read_buffer_size = 0
if data_len == nbytes:
return data
self._rbuffer_size = data_len - nbytes
self._rbuffer = StringIO(data)
return self._rbuffer.read(nbytes)
data = self._read(nbytes)
if len(data) == nbytes:
return data
self._rbuffer_size = len(data) - nbytes
self._rbuffer = StringIO(data)
return self._rbuffer.read(nbytes)
@synclize
def _read(self, nbytes):
timer = None
try:
if self._readtimeout:
timer = Timeout(self._readtimeout)
timer.start()
data = yield self._iostream.read_bytes(nbytes)
raise Return(data)
        except TimeoutException as e:
            self.close()
            raise socket.timeout(str(e))
finally:
if timer:
timer.cancel()
def recv(self, nbytes):
return self.read(nbytes)
def close(self):
self._iostream.close()
def set_nodelay(self, flag):
self._iostream.set_nodelay(flag)
def settimeout(self, timeout):
pass
def shutdown(self, direction):
if self._iostream.fileno():
self._iostream.fileno().shutdown(direction)
def recv_into(self, buff):
expected_rbytes = len(buff)
data = self.read(expected_rbytes)
srcarray = bytearray(data)
nbytes = len(srcarray)
buff[0:nbytes] = srcarray
return nbytes
def makefile(self, mode, other):
return self
def fileno(self):
return self._iostream.fileno()
class Event(object):
def __init__(self):
self._waiter = []
self._ioloop = IOLoop.current()
def set(self):
self._ioloop.add_callback(self._notify)
def wait(self, timeout=None):
current_greenlet = greenlet.getcurrent()
self._waiter.append(current_greenlet.switch)
waiter = Waiter()
if timeout:
timeout_checker = Timeout(timeout)
timeout_checker.start(current_greenlet.throw)
waiter.get()
timeout_checker.cancel()
else:
waiter.get()
def _notify(self):
for waiter in self._waiter:
waiter(self)
class Pool(object):
def __init__(self, max_size=32, wait_timeout=8, params={}):
self._maxsize = max_size
self._conn_params = params
self._pool = deque(maxlen=self._maxsize)
self._wait = deque()
self._wait_timeout = wait_timeout
self._count = 0
self._started = False
self._ioloop = IOLoop.current()
self._event = Event()
self._ioloop.add_future(spawn(self.start), lambda future: future)
def create_raw_conn(self):
pass
def init_pool(self):
self._count += 1
conn = self.create_raw_conn()
self._pool.append(conn)
@property
def size(self):
return len(self._pool)
def get_conn(self):
while 1:
if self._pool:
return self._pool.popleft()
elif self._count < self._maxsize:
self.init_pool()
else:
return self.wait_conn()
def wait_conn(self):
timer = None
child_gr = greenlet.getcurrent()
main = child_gr.parent
try:
if self._wait_timeout:
timer = Timeout(self._wait_timeout)
timer.start()
self._wait.append(child_gr.switch)
return main.switch()
        except TimeoutException:
            raise Exception("timeout waiting for a connection, pool size %s" %
                            self.size)
finally:
if timer:
timer.cancel()
def release(self, conn):
if self._wait:
switch = self._wait.popleft()
self._ioloop.add_callback(switch, conn)
else:
self._pool.append(conn)
def quit(self):
self._started = False
self._event.set()
def _close_all(self):
for conn in tuple(self._pool):
conn.close()
self._pool = None
def start(self):
# self.init_pool()
self._started = True
self._event.wait()
self._close_all()
``` |
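The central pattern in this module is the synclize/green pair: a Tornado coroutine is driven on the IOLoop while the calling greenlet is parked, so code written in a blocking style stays cooperative. Below is a minimal usage sketch under the assumption that the module is importable as greentor.green; the task and callback names are hypothetical.
```python
from tornado.ioloop import IOLoop
from greentor.green import green, sleep  # assumed package layout

@green
def blocking_style_task():
    sleep(0.5)   # parks this greenlet; the IOLoop keeps running
    return "done"

def on_done(future):
    print(future.result())  # -> done
    IOLoop.current().stop()

IOLoop.current().add_future(blocking_style_task(), on_done)
IOLoop.current().start()
```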
{
"source": "126alexander/ASS_126",
"score": 4
} |
#### File: 126alexander/ASS_126/Temperature.py
```python
import time
# Intro
print("Welcome to the Temperature Converter. Type C for Celsius, F for Fahrenheit and K for Kelvin")
# Function
def again():
    try_again = ""
    # Let the user choose a source temperature scale and the target scale to convert to
User_Temperature = input("your temperature | C | F | K | ").upper()
convert_Temperature = input("The temperature you want to convert to | C | F | K | ").upper()
    # Convert from the user's source scale (C, F, or K) to the chosen target scale (C, F, or K) and show the equation
if User_Temperature == "C":
if convert_Temperature == "F":
degree = float(input("enter the degree: "))
result = (degree * 9/5) + 32
print(f"{result}°F \nThe equation: ({degree} × 9/5) + 32 = {result}")
elif convert_Temperature == "K":
degree = float(input("enter the degree: "))
result = degree + 273.15
print(f"{result}°K \nThe equation: {degree} + 273.15 = {result}")
elif convert_Temperature == "C":
print("This is the same type of temperature")
time.sleep(1)
again()
else:
print("Type a temperature")
time.sleep(1)
again()
elif User_Temperature == "F":
if convert_Temperature == "C":
degree = float(input("enter the degree: "))
result = (degree - 32) * 5/9
print(f"{result}°F \nThe equation: ({degree} - 32) × 5/9 = {result}")
elif convert_Temperature == "K":
degree = float(input("enter the degree: "))
result = (degree - 32) * 5/9 + 273.15
print(f"{result}°K \nThe equation: ({degree} - 32) × 5/9 + 273.15 = {result}")
elif convert_Temperature == "F":
print("This is the same type of temperature")
time.sleep(1)
again()
else:
print("Type a temperature")
time.sleep(1)
again()
elif User_Temperature == "K":
if convert_Temperature == "C":
degree = float(input("enter the degree: "))
result = degree - 273.15
print(f"{result}°F \nThe equation: {degree} - 273.15 = {result}")
elif convert_Temperature == "F":
degree = float(input("enter the degree: "))
result = (degree - 273.15) * 9/5 + 32
print(f"{result}°K \nThe equation: ({degree} - 273.15) × 9/5 + 32 = {result}")
elif convert_Temperature == "K":
print("This is the same type of temperature")
time.sleep(1)
again()
else:
print("Type a temperature")
time.sleep(1)
again()
else:
print("Type a temperature")
time.sleep(1)
again()
    # Ask if the user wants to convert again
while try_again != "Yes" and try_again != "No":
print("\nDo you want to try again?")
try_again = input("Yes | No | ").lower().capitalize()
if try_again == "Yes":
again()
break
elif try_again == "No":
print("Goodbye")
break
again()
``` |
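The formulas hard-coded above are the standard Celsius/Fahrenheit/Kelvin conversions; a few spot checks at well-known fixed points (not part of the app) make the expected outputs concrete:
```python
# quick spot checks of the conversion formulas used above
assert (100 * 9/5) + 32 == 212.0             # C -> F: water boils at 212 F
assert (32 - 32) * 5/9 == 0.0                # F -> C: water freezes at 0 C
assert 0 + 273.15 == 273.15                  # C -> K: fixed offset
assert (273.15 - 273.15) * 9/5 + 32 == 32.0  # K -> F at the freezing point
```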
{
"source": "126alexander/LAB_13",
"score": 4
} |
#### File: LAB_13/modules/fun_task_1.py
```python
import random
def answer():
    n = 24
    grade = []
    for _ in range(n):
        grade.append(random.randint(2, 5))
    print(f"Grades array: {grade}")
    # count how many of the grades are fives
    count = 0
    for g in grade:
        if g == 5:
            count += 1
    print(f"Fives among them: {count}")
``` |
{
"source": "1271756664/PyHail",
"score": 2
} |
#### File: PyHail/pyhail/hdr.py
```python
def main(radar_dict):
"""
Hail Differential Reflectity Retrieval
Required DBZH and ZDR fields
Parameters:
===========
radar_dict: dictionary
contains two entries, dbz and zdr, which contain numpy arrays of their respective fields.
Returns:
========
hdr_meta: dict
pyart field dictionary containing HDR dataset
hdr_size_meta: dict
        pyart field dictionary containing HDR size dataset
"""
# extract fields
dbz = radar_dict["dbz"]
zdr = radar_dict["zdr"]
# calculate hdr
# apply primary function
zdr_fun = 19 * zdr + 27
# set limits based on zdr
zdr_fun[zdr <= 0] = 27
zdr_fun[zdr > 1.74] = 60
# apply to zhh
hdr = dbz - zdr_fun
# use polynomial from Depue et al. 2009 to transform dB into mm
hdr_size = 0.0284 * (hdr ** 2) - 0.366 * hdr + 11.69
hdr_size[hdr <= 0] = 0
# generate meta
hdr_meta = {
"data": hdr,
"units": "dB",
"long_name": "Hail Differential Reflectivity",
"description": "Hail Differential Reflectivity developed by <NAME> Zhao (1990) doi:10.1109/TGRS.1990.572906"
}
hdr_size_meta = {
"data": hdr_size,
"units": "mm",
"long_name": "HDR hail size estimate",
"description": "Hail Differential Reflectivity Hail Size developed by Depue et al. (2009) doi:10.1175/JAM2529.1",
"comments": "transform from HDR (dB) to hail size (mm); function scaled from paper figure"
}
# return hdr data
return hdr_meta, hdr_size_meta
``` |
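To see the retrieval end-to-end, here is a hedged example calling main with toy 2x2 arrays (the values are invented; in practice dbz and zdr come from a radar sweep, and the import path is assumed from the repository layout):
```python
import numpy as np
from pyhail.hdr import main  # assumed import path for the module above

radar_dict = {
    "dbz": np.array([[55.0, 60.0], [45.0, 30.0]]),  # reflectivity in dBZ
    "zdr": np.array([[0.5, 1.0], [2.0, -0.2]]),     # differential reflectivity in dB
}
hdr_meta, hdr_size_meta = main(radar_dict)
print(hdr_meta["data"])       # HDR in dB, e.g. 55 - (19*0.5 + 27) = 18.5
print(hdr_size_meta["data"])  # hail size estimate in mm (0 where HDR <= 0)
```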
{
"source": "1271756664/study",
"score": 2
} |
#### File: pycwr/retrieve/HID.py
```python
from ..configure.location_config import mbf_path
import xarray as xr
import numpy as np
DEFAULT_WEIGHTS = {'dBZ': 1.5, 'ZDR': 0.8, 'KDP': 1.0, 'CC': 0.8, 'LDR': 0.5, 'T': 0.4}
beta_params = xr.open_dataset(mbf_path)
def hid_beta_function(x, m, a, b):
"""
:param x: input data(Zh, Zdr, Kdp...)
:param m: center
:param a: width
:param b: slope
:return:
"""
if None in x:
return 0
else:
return 1 / (1 + ((x - m) / a) ** (2 * b))
def fhc_HCL(dBZ=None, ZDR=None, KDP=None, CC=None, LDR=None, T=None, method="hybrid",
band="C", weights=DEFAULT_WEIGHTS):
"""
Does FHC for warm-season precip. using beta function for Fuzzy Logic
HCL types: Species #:
-------------------------------
Drizzle 1
Rain 2
Ice Crystals 3
Dry Aggregates Snow 4
Wet Snow 5
Vertical Ice 6
Low-Density Graupel 7
High-Density Graupel 8
Hail 9
Big Drops 10
-------------------------------
cite{Dolan, Brenda , et al. "A Robust C-Band Hydrometeor Identification Algorithm and Application to
a Long-Term Polarimetric Radar Dataset." Journal of Applied Meteorology and Climatology 52.9(2013):2162-2186.}
:param weights:
:param dBZ: Input reflectivity scalar/array
:param ZDR: Input differential reflectivity scalar/array
:param KDP: Input specific differential phase scalar/array
:param CC: Input cross correlation ratio scalar/array
:param LDR: Input linear depolarization ratio scalar/array
:param T: Input temperature scalar/array
:param method: Currently support 'hybrid' or 'linear' methods; hybrid preferred
:param band: 'X', 'C', or 'S'
:return: hydrometeor species [1-10]
"""
if LDR is None and ZDR is None and KDP is None and CC is None:
print("No Polarimetric variable input!")
return
if dBZ is None:
print("No reflectivity variable input!")
return
    weights = dict(weights)  # copy so the module-level DEFAULT_WEIGHTS is not mutated
    if LDR is None:
        weights['LDR'] = 0
if ZDR is None:
weights['ZDR'] = 0
if KDP is None:
weights['KDP'] = 0
if T is None:
weights['T'] = 0
if CC is None:
weights['CC'] = 0
Beta_ZDR = hid_beta_function(np.stack([ZDR] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="ZDR", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="ZDR", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="ZDR", Param="b").values)
Beta_dBZ = hid_beta_function(np.stack([dBZ] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="dBZ", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="dBZ", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="dBZ", Param="b").values)
Beta_KDP = hid_beta_function(np.stack([KDP] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="KDP", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="KDP", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="KDP", Param="b").values)
Beta_CC = hid_beta_function(np.stack([CC] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="CC", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="CC", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="CC", Param="b").values)
Beta_LDR = hid_beta_function(np.stack([LDR] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="LDR", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="LDR", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="LDR", Param="b").values)
if T is None:
Beta_T = 1
else:
Beta_T = hid_beta_function(np.stack([T] * 10, axis=-1),
beta_params.MBF.sel(Band=band, Feature="T", Param="m").values,
beta_params.MBF.sel(Band=band, Feature="T", Param="a").values,
beta_params.MBF.sel(Band=band, Feature="T", Param="b").values)
if method == "hybrid":
HCL = (Beta_LDR*weights['LDR'] + Beta_KDP*weights['KDP'] + Beta_ZDR*weights['ZDR'] + Beta_CC*weights['CC'])/\
(weights['LDR'] + weights['KDP'] + weights['ZDR'] + weights['CC'])*Beta_T*Beta_dBZ
elif method=="linear":
HCL = (Beta_LDR*weights['LDR'] + Beta_KDP*weights['KDP'] + Beta_ZDR*weights['ZDR'] + Beta_CC*weights['CC'] +\
Beta_T * weights['T'] + Beta_dBZ * weights['dBZ'])/(weights['LDR'] + weights['KDP'] + weights['ZDR'] +\
weights['CC'] + weights['T'] + weights['dBZ'])
else:
print("No weighting method defined, use hybrid or linear")
return
return np.where(np.any(np.isnan(HCL), axis=-1), np.nan, np.argmax(HCL, axis=-1) + 1)
``` |
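The fuzzy-logic core is the one-dimensional beta membership function; the worked example below uses made-up (m, a, b) parameters rather than values from the bundled membership NetCDF to show how membership decays away from the class center. It assumes hid_beta_function from the module above is in scope.
```python
import numpy as np

# membership of ZDR observations for a hypothetical class with
# center m = 2.0 dB, width a = 1.0 dB and slope b = 2
x = np.array([0.0, 2.0, 4.0])
print(hid_beta_function(x, m=2.0, a=1.0, b=2))
# -> [~0.0588, 1.0, ~0.0588], i.e. 1 / (1 + ((x - 2) / 1)**4)
```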
{
"source": "1271756664/-xESMF",
"score": 3
} |
#### File: -xESMF/IO/read_netCDF_with_PyNIO.py
```python
import os
import numpy as np
import Ngl, Nio
#-----------------------------------------------------------------------
#-- Function: getVariableNames() - return the variable names (without coordinates)
#-----------------------------------------------------------------------
def getVariableNames(file):
dims = file.dimensions
coords = list(dims.keys())
names = list(file.variables.keys())
vnames = [n for n in names if n not in coords]
return(vnames, coords)
#-----------------------------------------------------------------------
#-- Function: main
#-----------------------------------------------------------------------
def main():
#-- input file rectilinear_grid_3d.nc from the NCL User Guide
#-- is available in the PyNGL installation
home = os.environ.get('HOME')
fname = os.path.join(home,"local/miniconda2/envs/pyngl_py3/lib/python3.7/site-packages/ngl/ncarg/data/nug/rectilinear_grid_3D.nc") #-- data file name
#-- open file and print some information similar to ncdump and others
file = Nio.open_file(fname,"r")
print('------------------------------------------------------')
print()
print('--> file ', file)
print()
print('--> file attributes ', file.attributes)
print()
dims = file.dimensions
   print('--> file dimensions ', dims)
print()
print('--> file size of dimension time = ', dims['time'])
print('--> file size of dimension lat = ', dims['lat'])
print('--> file size of dimension lon = ', dims['lon'])
#-- same as above
#print('--> file size of dimension time = ', file.dimensions['time'])
#print('--> file size of dimension lat = ', file.dimensions['lat'])
#print('--> file size of dimension lon = ', file.dimensions['lon'])
print()
#-- get the variable and coordinates names (using function defined at the top of the script)
vnames, coords = getVariableNames(file)
print('--> Variable names (no coordinates): ', vnames)
print('--> Variable names for coordinates: ', coords)
print()
#-- get the attributes of variable 't'
vattr = [getattr(file.variables['t'],a) for a in file.variables['t'].attributes.keys()]
print('--> file variable attributes ', list(vattr))
print()
#-- read variable 't', first timestep, first level
var = file.variables['t'][0,0,:,:]
#-- print the size and shape of the variable
print('------------------------------------------------------')
print()
print('--> var.size ',var.shape[0] * var.shape[1])
print('--> var.shape ',var.shape)
#-- same as
#print('--> var.size ',f.dimensions['lat'] * f.dimensions['lon'])
#print('--> var.shape ',var.shape)
print()
#-- read variable latitude and longitude arrays
lat = file.variables['lat'][:]
lon = file.variables['lon'][:]
#-- print the minimum and maximum of lat and lon
print('------------------------------------------------------')
print()
print('--> lat min ', lat.min().item())
print('--> lat max ', lat.max())
print('--> lon min ', lon.min())
print('--> lon max ', lon.max())
#-- the above notation has the same results as below
#print('--> lat min ', lat.min().item())
#print('--> lat max ', lat.max().item())
#print('--> lon min ', lon.min().item())
#print('--> lon max ', lon.max().item())
print()
#-- retrieve the name of the coordinates lat/lon variables and the values of
#-- the shape of the coordinates
dimslat = coords[0]
shapelat = lat.shape[0]
dimslon = coords[1]
shapelon = lon.shape[0]
nrlat = shapelat
nrlon = shapelon
print('------------------------------------------------------')
print()
print('--> dimslat: ',dimslat, ' dimslon: ',dimslon,' nrlat: ',nrlat,' nrlon: ',nrlon)
print()
#-- print variable information
print('------------------------------------------------------')
print()
print('--> var information')
print()
print(var)
print()
##-- print the variable attributes
#print('------------------------------------------------------')
#print()
#print('--> attributes: ',var.key())
#print()
#-- print the variable values
#print('------------------------------------------------------')
#print()
#print('--> values ')
#print()
#print(var.values)
#print()
#-- print the type of the variable (a numpy array)
print('------------------------------------------------------')
print()
print('--> type(var) ',type(var))
print()
#-- print the type of the variable values (numpy.ndarray)
print('------------------------------------------------------')
print()
print('--> type(var[:,:]) ',type(var[:,:]))
print()
#-- select variable t from dataset for first timestep
print('------------------------------------------------------')
print()
print('--> dataset variable t (time=0, lev=6)')
print()
print(file.variables['t'][0,6,:,:])
print()
#-- select variable t from dataset, lat index 1 and lon index 2
print('------------------------------------------------------')
print()
print('--> dataset variable t at lat index 1 and lon index 2')
print()
print(file.variables['t'][:,:,1,2])
print()
#-- select variable t, timestep 2001-01-01
print('------------------------------------------------------')
print()
print('--> time(0) = "2001-01-01"')
print()
print(file.variables['t'][0,:,:,:])
print()
#-- select a sub-region (slice) - note the special extended selection notation!
#-- The leading i tells PyNIO to use the index instead of coordinate values, e.g. time|i0
print('------------------------------------------------------')
print()
print('--> select sub-region')
print()
print(file.variables['t']['time|i0 lev|: lat|20:0 lon|-25:0'])
print()
#-- print median values of variable t of dataset, one value for each level (axis=lat,lon)
print('------------------------------------------------------')
print()
print('--> variable median')
print()
print(np.median(file.variables['t'],axis=(2,3)))
print()
#-- compute the means of the variable t of the dataset, one value for each level (axis=lat,lon)
print('------------------------------------------------------')
print()
print('--> means')
print()
means = np.mean(file.variables['t'], axis=(2,3))
print(means)
print()
#-- compute the mean of the variable t which are greater than 273.15 K
print('------------------------------------------------------')
print()
print('--> only means greater than 273.15 K')
print()
print(means[np.where(means > 273.15)])
print()
#-------------------------------------------------------------
#-- run main
#-------------------------------------------------------------
if __name__ == "__main__":
main()
```
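The extended subscript strings used above (e.g. `'time|i0 lev|: lat|20:0 lon|-25:0'`) are a PyNIO-specific selection syntax: a leading `i` selects by index, while plain numbers select by coordinate value. A minimal sketch, assuming a file with `time/lev/lat/lon` dimensions (the path is illustrative):
```python
import Nio

f = Nio.open_file("rectilinear_grid_3D.nc", "r")                #-- illustrative path
t_idx = f.variables['t']['time|i0 lev|i0 lat|: lon|:']          #-- "i" prefix: index-based
t_val = f.variables['t']['time|i0 lev|: lat|20:0 lon|-25:0']    #-- no "i": coordinate values
```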
#### File: Transition_examples_NCL_to_PyNGL/xy/TRANS_bar_chart.py
```python
from __future__ import print_function
import numpy as np
import Ngl
#-- function get_bar returns coordinates of a bar
def get_bar(x,y,dx,ymin,bar_width_perc=0.6):
dxp = (dx * bar_width_perc)/2.
xbar = np.array([x-dxp, x+dxp, x+dxp, x-dxp, x-dxp])
ybar = np.array([ ymin, ymin, y, y, ymin])
return xbar,ybar
#--------------
# MAIN
#--------------
#-- create x-values and example y-values
x = np.arange(1,13,1)
y = [8,5,11,6,9,9,6,2,4,1,3,3]
dx = min(x[1:] - x[:-1]) #-- minimum distance between consecutive x-values
#-- define color and x-axis labels
color = 'blue'
xlabels = ["Jan","Feb","Mar","Apr","May","Jun", \
"Jul","Aug","Sep","Oct","Nov","Dec"]#-- x-axis labels
#-- open a workstation
wkres = Ngl.Resources() #-- generate a resources object for workstation
wks_type = "png" #-- output type of workstation
wks = Ngl.open_wks(wks_type,"plot_TRANS_bar_chart_py",wkres)
#-- set resources
res = Ngl.Resources() #-- generate a resources object for plot
res.nglFrame = False #-- don't advance frame
res.nglPointTickmarksOutward = True #-- point tickmarks outward
res.tiXAxisString = "x-values" #-- x-axis title
res.tiYAxisString = "y-values" #-- y-axis title
res.tmXBMode = "Explicit" #-- define bottom x-axis values and labels
res.tmXBValues = x #-- x-axis values
res.tmXBLabels = xlabels #-- x-axis labels
res.tmXBLabelFontHeightF = 0.012 #-- bottom x-axis font size
res.trXMinF = 0.0 #-- x-axis min value
res.trXMaxF = 13.0 #-- x-axis max value
res.trYMinF = 0.0 #-- y-axis min value
res.trYMaxF = 12.0 #-- y-axis max value
#-- bar resources
barres = Ngl.Resources() #-- resource list for bars
barres.gsFillColor = color #-- set bar color
#-- loop through each y point and create bar
for i in range(len(y)):
xbar,ybar = get_bar(x[i], y[i], dx, res.trYMinF, 0.3)
plot = Ngl.xy(wks, xbar, ybar, res)
Ngl.polygon(wks, plot, xbar, ybar, barres) #-- filled bar
Ngl.frame(wks) #-- advance frame
Ngl.end()
```
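To see what `get_bar` produces, here is a worked example with hand-checked numbers (values chosen for illustration): for `x=3, y=11, dx=1, ymin=0` and the default width fraction of 0.6, `dxp = 0.3`, so the polygon traces left-right-right-left-left along x and bottom-bottom-top-top-bottom along y.
```python
xbar, ybar = get_bar(3, 11, 1, 0)   #-- uses get_bar as defined above
#-- xbar: [2.7, 3.3, 3.3, 2.7, 2.7]
#-- ybar: [0,   0,   11,  11,  0  ]
```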
#### File: Visualization/PyNGL/Hovmoeller_plot_hov_1.py
```python
from __future__ import print_function
import numpy as np
import os, sys
import Ngl,Nio
#-------------------------------------------------------
#-- Function: add_titles(wks,plot,resources,title,left,center,right)
#-------------------------------------------------------
def add_titles(wks,plot,title="",left="",center="",right=""):
vpx = Ngl.get_float(plot,"vpXF") #-- retrieve value of res.vpXF from plot
vpy = Ngl.get_float(plot,"vpYF") #-- retrieve value of res.vpYF from plot
vpw = Ngl.get_float(plot,"vpWidthF") #-- retrieve value of res.vpWidthF from plot
vph = Ngl.get_float(plot,"vpHeightF") #-- retrieve value of res.vpHeightF from plot
ymax = vpy+0.08 #-- we need space for the title and strings
if(ymax > 0.98):
print("--> if you can't see the title use res.nglMaximize = False and/or set res.vpYF")
#-- add title
if(title != ""):
tires = Ngl.Resources()
tires.txFontHeightF = 0.018
tires.txJust = "CenterCenter"
tires.txFont = 22 #-- Font 22: Helvetica bold
if(left != "" or center != "" or right != ""):
y = vpy + 0.07
else:
y = vpy + 0.05
Ngl.text_ndc(wks, title, 0.5, y, tires)
#-- add left, center and/or right string
txres = Ngl.Resources()
txres.txFontHeightF = 0.014 #-- font size for left, center and right string
y = vpy + 0.035 #-- y-position
if(left != ""):
txres.txJust = "CenterLeft" #-- text justification
x = vpx #-- x-position
Ngl.text_ndc(wks, left, x, y, txres) #-- add text to wks
if(center != ""):
txres.txJust = "CenterCenter" #-- text justification
Ngl.text_ndc(wks, center, 0.5, y, txres) #-- add text to wks
if(right != ""):
txres.txJust = "CenterRight" #-- text justification
x = vpx+vpw #-- x-position
Ngl.text_ndc(wks, right, x, y, txres) #-- add text to wks
#-------------------------------------------------------
#-- MAIN
#-------------------------------------------------------
#-- data path and file name
ncarg_root = os.environ.get('NCARG_ROOT')
diri = ncarg_root + '/lib/ncarg/data/cdf/'
fname = 'chi200_ud_smooth.nc'
#-- open file and read variables
f = Nio.open_file(diri + fname,"r") #-- open data file
chi = f.variables['CHI'][:,:] #-- read variable CHI[time,lon]
lon = f.variables['lon'][:]
time = f.variables['time'][:]
scale = 1.0e6
chi = chi/scale
#-- create the plot
wks = Ngl.open_wks('png','plot_hovmueller') #-- open workstation
#-- set resources
res = Ngl.Resources() #-- generate a resources object for plot
res.nglFrame = False
res.nglMaximize = False #-- don't maximize plot output
#res.tiMainString = 'Default Hovmu~H-13V2F35~H~FV-2H3~ller' #-- title
res.sfXArray = lon #-- scalar field x
res.sfYArray = time #-- scalar field y
res.tiYAxisString = 'elapsed time'
res.tmYLLabelFontHeightF = 0.015
res.nglPointTickmarksOutward = True #-- point tickmarks out
plot = Ngl.contour(wks,chi,res) #-- draw contours
#-- delete resources because they will cause warnings (Why?)
del([res.sfXArray,res.sfYArray,res.tiYAxisString,res.tmYLLabelFontHeightF])
#-- add the title and left, center and/or right string
title = "Default Hovmu~H-13V2F35~H~FV-2H3~ller"
long_name = f.variables["CHI"].attributes['long_name']
units = f.variables["CHI"].attributes['units']
add_titles(wks,plot,title,left=long_name,right=units)
#-- advance the frame
Ngl.frame(wks)
#-- end
Ngl.end()
```
#### File: Visualization/PyNGL/xy_plot_timeseries_xarray.py
```python
from __future__ import print_function
import sys, os
import numpy as np
import xarray as xr
import datetime
import Ngl
#-----------------------------------------------------------------------
#-- Function: add_titles(wks, plot, title, left, center, right, xtitle, ytitle)
#-----------------------------------------------------------------------
def ngl_Strings(wks, plot, title='', left='', center='', right='', xtitle='', ytitle=''):
vpx = Ngl.get_float(plot,"vpXF") #-- retrieve value of res.vpXF from plot
vpy = Ngl.get_float(plot,"vpYF") #-- retrieve value of res.vpYF from plot
vpw = Ngl.get_float(plot,"vpWidthF") #-- retrieve value of res.vpWidthF from plot
vph = Ngl.get_float(plot,"vpHeightF") #-- retrieve value of res.vpHeightF from plot
ymax = vpy+0.08 #-- we need space for the title and strings
if(ymax > 0.98):
print("--> if you can't see the title use res.nglMaximize = False and/or set res.vpYF")
#-- add title
if(title != ""):
tires = Ngl.Resources()
tires.txFontHeightF = 0.016
tires.txJust = "CenterCenter"
tires.txFont = 22 #-- Font 22: Helvetica bold
if(left != "" or center != "" or right != ""):
y = vpy + 0.075
else:
y = vpy + 0.05
Ngl.text_ndc(wks, title, 0.5, y, tires)
#-- add left, center and/or right string
txres = Ngl.Resources()
txres.txFontHeightF = 0.020 #-- font size for left, center and right string
y = vpy + 0.035 #-- y-position
if(left != ""):
txres.txJust = "CenterLeft" #-- text justification
x = vpx #-- x-position
Ngl.text_ndc(wks, left, x, y, txres) #-- add text to wks
if(center != ""):
txres.txJust = "CenterCenter" #-- text justification
Ngl.text_ndc(wks, center, 0.5, y, txres) #-- add text to wks
if(right != ""):
txres.txJust = "CenterRight" #-- text justification
x = vpx+vpw #-- x-position
Ngl.text_ndc(wks, right, x, y, txres) #-- add text to wks
#-- add y-axis title string
txtires = Ngl.Resources()
txtires.txFontHeightF = 0.024 #-- font size for x-axis title string
txtires.txAngleF = 90.0
txtires.txJust = "CenterCenter" #-- text justification
y = vpy - vph/2 #-- y-position
x = vpx - 0.12
Ngl.text_ndc(wks, ytitle, x, y, txtires) #-- add text to wks
#-----------------------------------------------------------------------
#-- Function: conv_time_netcdf(ds)
#-----------------------------------------------------------------------
def conv_time_netcdf(ds):
ntime = len(ds.time)
years = ds.time.dt.year.values
months = ds.time.dt.month.values
days = ds.time.dt.day.values
date_labels = [datetime.date(years[i],months[i],days[i]) for i in range(0,ntime)]
date_labels = np.array(date_labels,dtype='str')
return(date_labels)
#-----------------------------------------------------------------------
#-- Function: main
#-----------------------------------------------------------------------
def main():
print('')
#-- open file and read variable and time
home = os.environ.get('HOME')
fname = os.path.join(home,'NCL/PyNGL/User_Guide_examples/rectilinear_grid_2D.nc')
ds = xr.open_dataset(fname)
var = ds.tsurf
time = ds.time
#-- xarray deletes the units and long_name attributes, so we have to get
#-- them another way
print('--> time attributes:', ds.time.attrs)
print('')
units = var.attrs['units']
lname = var.attrs['long_name']
#-- print some information about the variable and the time coordinate
print('--> var: ',var)
print('')
#-- convert the time values to date strings using a user defined function
date_labels = conv_time_netcdf(ds)
print('--> date_labels ',type(date_labels))
print('')
#-- for explicit x-axis generate simple time array
time = np.arange(0,len(ds.time),1)
#-- compute the area mean without weighting
areamean = np.average(var,axis=(1,2))
print('--> areamean: ',areamean)
print('')
#-- open a workstation
wks = Ngl.open_wks('png','plot_xy_plot_timeseries') #-- graphics output
#-- set resources/attributes
res = Ngl.Resources() #-- generate an res object for plot
res.tiMainString = 'DKRZ Example: xy-plot timeseries' #-- draw a title
res.tiMainOffsetYF = 0.02
res.nglMaximize = False
res.nglPointTickmarksOutward = True #-- point tickmarks outward
res.nglDraw = False
res.nglFrame = False
res.vpWidthF = 0.7
res.vpHeightF = 0.7
res.vpXF = 0.2
res.vpYF = 0.85
res.tmXBMode = 'Explicit' #-- use explicit values
res.tmXBValues = time
res.tmXBLabels = list(date_labels)
res.tmXBLabelFontHeightF = 0.006
res.tmXBLabelJust = 'CenterRight'
res.tmXBLabelDeltaF = 0.2
res.tmXBLabelAngleF = 40.0
res.tmXBLabelStride = 4
#-- draw the plot
plot = Ngl.xy(wks,time,areamean,res)
#-- add additional strings to plot (like NCL's gsnLeftString and gsnRightString)
ngl_Strings(wks, plot, left=lname, right=units, ytitle=lname)
#-- done
Ngl.draw(plot)
Ngl.frame(wks)
Ngl.end()
#-------------------------------------------------------------
#-- run main
#-------------------------------------------------------------
if __name__ == "__main__":
main()
``` |
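A quick, self-contained check of `conv_time_netcdf` (as defined above) with a synthetic dataset; the dates are illustrative:
```python
import numpy as np
import xarray as xr

ds = xr.Dataset(coords={'time': np.array(['2001-01-01', '2001-02-01'],
                                         dtype='datetime64[ns]')})
print(conv_time_netcdf(ds))   #-- ['2001-01-01' '2001-02-01']
```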
{
"source": "1271756664/xgrads",
"score": 2
} |
#### File: xgrads/xgrads/core.py
```python
import os, sys, re
import numpy as np
from datetime import datetime
from numpy import datetime64, timedelta64
"""
Core classes are defined below
"""
class CtlDescriptor(object):
"""
This class represents a descriptor file like the .ctl file for GrADS.
It generally includes a multi-dimensional spherical dataset.
Attributes:
dsetPath: dataset file
descPath: descriptor file
indxPath: index file (for GRIB file)
stnmPath: station map file (for station file)
pdef: projection-definition
tdef: t-definition
zdef: z-definition
ydef: y-definition
xdef: x-definition
vdef: variable-definition
zrev: z-dimension reverse (i.e., from upper to lower levels)
yrev: y-dimension reverse (i.e., from north to south)
hasData: whether the corresponding binary data file exist
vcount : variable count
dtype: data type
periodicX: whether xdef is periodic
cal365Days: whether the calendar is always 365 days (no leap year)
template : whether it is a template for multiple binary files
sequential: whether it is a sequential file (Fortran style)
byteOrder : byte order, little-endian or big-endian
storage : storage type, '99' or '-1,20' or others
totalZCount: total number of horizontal slices
zRecLength : record length of a single horizontal slice
tRecLength : record length of a single time (including all variables)
"""
def __init__(self, encoding='GBK', **kwargs):
"""
Constructor.
Parameters
----------
encoding : str
Encoding for the ctl file contents e.g., ['GBK', 'UTF-8'].
file = fileName : str
The ctl path/file name.
content = content: str
A string representation for the ctl file contents.
Returns
----------
ctl : CtlDescriptor
An object represents the ctl file
"""
self.vcount = 0
self.pdef = None
self.tdef = None
self.zdef = None
self.ydef = None
self.xdef = None
self.vdef = None
self.dsetPath = ''
self.descPath = ''
self.indxPath = ''
self.stnmPath = ''
self.storage = ''
self.dtype = ''
self.title = ''
self.incre = ''
self.zrev = False
self.yrev = False
self.hasData = False
self.periodicX = False
self.cal365Days = False
self.template = False
self.sequential = False
self.totalZCount = 0
self.zRecLength = 0
self.tRecLength = 0
self.byteOrder = sys.byteorder
if kwargs.get('file'):
abspath = kwargs['file']
if os.path.getsize(abspath) / (1024.0*1024.0) > 2:
raise Exception('ctl file is too large (> 2 MB)')
with open(abspath, 'r', encoding=encoding) as f:
fileContent = f.readlines()
elif kwargs.get('content'):
abspath = None
fileContent = kwargs['content'].splitlines()
else:
raise Exception('invalid key word '+
'(file='' or content='' is allowed)')
for i, line in enumerate(fileContent):
llower = line.lower()
if (llower.startswith('dset') or
llower.startswith('index') or
llower.startswith('stnmap')
) and abspath is not None and '^' in line:
dirname = os.path.dirname(abspath)
if dirname[-1] != '/':
fileContent[i] = line.replace('^', dirname + '/')
else:
fileContent[i] = line.replace('^', dirname)
self.descPath=abspath
self.parse(fileContent)
def parse(self, fileContent):
dpath_str = None
for oneline in fileContent:
if oneline.lower().startswith('dset'):
dpath_str = oneline.split()[1]
elif oneline.lower().startswith('index'):
self._processIndex(oneline)
elif oneline.lower().startswith('stnmap'):
self._processStnmap(oneline)
elif oneline.lower().startswith('dtype'):
self.dtype = oneline[5:].strip()
elif oneline.lower().startswith('pdef'):
self._processPDEF(oneline)
elif oneline.lower().startswith('title'):
self.title = oneline.split(maxsplit=1)[1].strip()
elif oneline.lower().startswith('undef'):
self.undef = float(oneline.split()[1].strip())
elif oneline.lower().startswith('options'):
self._processOptions(oneline)
elif oneline.lower().startswith('byteswapped'):
self.byteOrder = 'big' \
if sys.byteorder == 'little' else 'little'
elif oneline.lower().startswith('xdef'):
self._processXDef(oneline, fileContent)
elif oneline.lower().startswith('ydef'):
self._processYDef(oneline,fileContent)
elif oneline.lower().startswith('zdef'):
self._processZDef(oneline, fileContent)
elif oneline.lower().startswith('tdef'):
self._processTDef(oneline)
elif oneline.lower().startswith('vars'):
self._processVars(oneline, fileContent)
elif oneline.startswith('*') or oneline.strip() == '':
continue
if dpath_str == None:
raise Exception('no valid dset is parsed')
if self.template:
self._processDSets(dpath_str)
else:
self._processDSet(dpath_str)
if self.yrev:
self.ydef.samples = np.flip(self.ydef.samples)
if self.zrev:
self.zdef = np.flip(self.zdef)
def _processDSets(self, dpath_str):
strPos = dpath_str.find('%')
if strPos == -1:
raise Exception('template is used in ctl but no % in dset')
endPos = len(dpath_str) - dpath_str[::-1].index('%') + 2
template = dpath_str[strPos:endPos]
tokens = []
for token in self._split_by_len(template, 3):
tokens.append(self._get_template_format(token))
fmt = ''.join(tokens)
fileList = []
times = self.tdef.samples
for l in range(len(times)):
part = times[l].item().strftime(fmt)
fname = dpath_str[:strPos] + part + dpath_str[endPos:]
fname = self._replace_forecast_template(fname, l)
# remove duplicated file
if fname not in fileList:
fileList.append(fname)
self.dsetPath = np.array(fileList)
has = True
for file in fileList:
if not os.path.exists(file):
has = False
break
self.hasData = has
def _processDSet(self, dpath_str):
self.dsetPath = dpath_str
self.hasData = os.path.exists(self.dsetPath)
def _processIndex(self, oneline):
self.indxPath = oneline.split()[1]
def _processStnmap(self, oneline):
self.stnmPath = oneline.split()[1]
def _processPDEF(self, oneline):
self.pdef = PDEF(oneline)
def _processOptions(self, oneline):
lineLower = oneline.lower()
if 'yrev' in lineLower: self.yrev = True
if 'zrev' in lineLower: self.zrev = True
if 'template' in lineLower: self.template = True
if 'sequential' in lineLower: self.sequential = True
if '365_day_calendar' in lineLower: self.cal365Days = True
if 'big_endian' in lineLower: self.byteOrder = 'big'
if 'byteswapped' in lineLower: self.byteOrder = \
'big' if sys.byteorder == 'little' else 'little'
def _processXDef(self, oneline, fileContent):
tokens = oneline.lower().split()
xnum = int(tokens[1])
if tokens[2] == 'linear': xlnr = True
elif tokens[2] == 'levels': xlnr = False
else: raise Exception('invalid type for xdef (linear or levels): ' +
tokens[2])
if xlnr:
start, intv = float(tokens[3]), float(tokens[4])
self.xdef = Coordinate('xdef', np.linspace(start,
start + intv * (xnum-1),
xnum, dtype=np.float32))
else:
values = [float(i) for i in tokens[3:]]
count = len(values)
index = fileContent.index(oneline) + 1
while count < xnum:
split = fileContent[index].split()
values += [float(v) for v in split]
count += len(split)
index += 1
if count != xnum:
raise Exception('xdef not parsed correctly')
self.xdef = Coordinate('xdef', np.array(values))
self.periodicX = self.xdef.isPeriodic(360)
def _processYDef(self, oneline, fileContent):
tokens = oneline.lower().split()
ynum = int(tokens[1])
if tokens[2] == 'linear': ylnr = True
elif tokens[2] == 'levels': ylnr = False
else: raise Exception('invalid type for ydef (linear or levels): ' +
tokens[2])
if ylnr:
start, intv = float(tokens[3]), float(tokens[4])
self.ydef = Coordinate('ydef', np.linspace(start,
start + intv * (ynum-1),
ynum, dtype=np.float32))
else:
values = [float(i) for i in tokens[3:]]
count = len(values)
index = fileContent.index(oneline) + 1
while count < ynum:
split = fileContent[index].split()
values += [float(v) for v in split]
count += len(split)
index += 1
if count != ynum:
raise Exception(('ydef not parsed correctly: count={0} '+
'while ynum={1}').format(count, ynum))
self.ydef = Coordinate('ydef', np.array(values))
def _processZDef(self, oneline, fileContent):
tokens = oneline.lower().split()
znum = int(tokens[1])
if tokens[2] == 'linear': zlnr = True
elif tokens[2] == 'levels': zlnr = False
else: raise Exception('invalid type for zdef (linear or levels): ' +
tokens[2])
if zlnr:
start, intv = float(tokens[3]), float(tokens[4])
self.zdef = Coordinate('zdef', np.linspace(start,
start + intv * (znum-1),
znum, dtype=np.float32))
else:
values = [float(i) for i in tokens[3:]]
count = len(values)
index = fileContent.index(oneline) + 1
while count < znum:
split = fileContent[index].split()
values += [float(v) for v in split]
count += len(split)
index += 1
if count != znum:
raise Exception('zdef not parsed correctly')
self.zdef = Coordinate('zdef', np.array(values))
def _processTDef(self, oneline):
tokens = oneline.lower().split()
tnum = int(tokens[1])
if tokens[2]!='linear':
raise Exception('nonlinear tdef is not supported')
times = self._times_to_array(tokens[3], tokens[4], tnum)
self.incre = GrADS_increment_to_timedelta64(tokens[4])
self.tdef = Coordinate('tdef', times)
def _processVars(self, oneline, fileContent):
if (self.dtype != 'station' and
not all([self.tdef, self.zdef, self.ydef, self.xdef])):
raise Exception('vdef should be after x, y, z and t definitions')
t = self.tdef.length()
if self.dtype != 'station':
y = self.ydef.length() if self.pdef is None else self.pdef.jsize
x = self.xdef.length() if self.pdef is None else self.pdef.isize
self.zRecLength = x * y * 4
# add two bytes at the beginning and the end
if self.sequential:
self.zRecLength += 8
tokens = oneline.split()
vnum = int(tokens[1])
start = fileContent.index(oneline) + 1
if vnum < 1:
raise Exception('there should be at least one CtlVar')
self.vcount = vnum
self.vdef = [None] * vnum
v = CtlVar(fileContent[start])
v.index = 0
v.strPos = 0
v.undef = self.undef
v.tcount = t
if self.dtype != 'station':
v.ycount=y
v.xcount=x
self.totalZCount += v.zcount
self.storage = v.storage
self.vdef[0] = v
type1 = type2 = type3 = False
for i in range(1, vnum):
v = CtlVar(fileContent[start+i])
vs = self.vdef[i-1]
v.index = i
v.undef = self.undef
v.tcount = t
if self.dtype != 'station':
v.ycount = y
v.xcount = x
# if v.storage in ['99', '0']:
if v.storage in ['99', '0', '00', '000', '1', '11', '111']:
v.strPos = vs.zcount * self.zRecLength + vs.strPos
type1 = True
elif v.storage == '-1,20':
v.strPos = vs.zcount * self.zRecLength * t + vs.strPos
type2 = True
else:
type3 = True
if not type3 and type1 == type2:
raise Exception('storage type should be the same')
self.totalZCount += v.zcount
self.vdef[i] = v
self.tRecLength = self.zRecLength * self.totalZCount
if fileContent[start + vnum].strip().lower() != 'endvars':
raise Exception('endvars is expected')
def _get_template_format(self, part):
"""
Get time format string. See the following URL for reference:
http://cola.gmu.edu/grads/gadoc/templates.html
%x1 1 digit decade
%x3 3 digit decade
%y2 2 digit year
%y4 4 digit year
%m1 1 or 2 digit month
%m2 2 digit month (leading zero if needed)
%mc 3 character month abbreviation
%d1 1 or 2 digit day
%d2 2 digit day (leading zero if needed)
%h1 1 or 2 digit hour
%h2 2 digit hour
%h3 3 digit hour (e.g., 120 or 012)
%n2 2 digit minute; leading zero if needed
%f2 2 digit forecast hour; leading zero if needed; more digits added
for hours >99; hour values increase indefinitely
%f3 3 digit forecast hour; leading zeros if needed; more digits added
for hours >999; hour values increase indefinitely
%fn2 2 digit forecast minute; leading zero if needed; more digits added
for minutes > 99; minute values increase indefinitely (2.0.a9+)
%fhn forecast time expressed in hours and minutes (hhnn) where minute
value (nn) is always <=59 and hour value (hh) increases indefinitely.
If hh or nn are <=9, they are padded with a 0, so they are always at
least 2 digits; more digits added for hours >99. (2.0.a9+)
%fdhn forecast time expressed in days, hours, and minutes (ddhhnn) where
minute value (nn) is always <=59, hour value (hh) is always <=23 and
day value (dd) increases indefinitely. If dd, hh, or nn are <=9, they
are padded with a 0 so they are always at least 2 digits; more digits
added for days >99. (2.0.a9+)
%j3 3 digit julian day (day of year) (2.0.a7+)
%t1 1 or 2 digit time index (file names contain number sequences that
begin with 1 or 01) (2.0.a7+)
%t2 2 digit time index (file names contain number sequences that begin
with 01) (2.0.a7+)
%t3 3 digit time index (file names contain number sequences that begin
with 001) (2.0.a7+)
%t4 4 digit time index (file names contain number sequences that begin
with 0001) (2.0.a8+)
%t5 5 digit time index (file names contain number sequences that begin
with 00001) (2.0.a8+)
%t6 6 digit time index (file names contain number sequences that begin
with 000001) (2.0.a8+)
%tm1 1 or 2 digit time index (file names contain number sequences that
begin with 0 or 00) (2.0.a7+)
%tm2 2 digit time index (file names contain number sequences that begin
with 00) (2.0.a7+)
%tm3 3 digit time index (file names contain number sequences that begin
with 000) (2.0.a7+)
%tm4 4 digit time index (file names contain number sequences that begin
with 0000) (2.0.a8+)
%tm5 5 digit time index (file names contain number sequences that begin
with 00000) (2.0.a8+)
%tm6 6 digit time index (file names contain number sequences that begin
with 000000) (2.0.a8+)
Parameters
----------
part : str
A string in the above format started with %.
Returns
-------
re : str
A string represents the format in python datetime
"""
if part == '%y2':
return '%y'
elif part == '%y4':
return '%Y'
elif part == '%m1':
return '%m'
elif part == '%m2':
return '%m'
elif part == '%mc':
return '%b'
elif part == '%d1':
return '%d'
elif part == '%d2':
return '%d'
elif part == '%h1':
return '%H'
elif part == '%h2':
return '%H'
elif part == '%n2':
return '%M'
elif part in ['%f3', '%f2']: # this is not supported by strftime()
return '_miniufo_' + part[1:]
else:
raise Exception('unsupported format: ' + part)
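# Illustrative example of the mapping above: the dset template
# "%y4%m2%d2" is split into the 3-character tokens "%y4", "%m2", "%d2"
# and mapped to the strftime format "%Y%m%d", so the time 2000-01-05
# yields the filename part "20000105".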
def _replace_forecast_template(self, fname, l):
"""
Replace forecast str %f as a template in dset.
Parameters
----------
fname: str
A given string of binary file.
l: int
Index of file in a template.
Returns
-------
re : str
A string after replacing the %f template.
"""
if fname.find('_miniufo_f3') != -1:
dt_h = self.incre.astype('timedelta64[s]') / \
np.timedelta64(1, 'h')
fname = fname.replace('_miniufo_f3', '{0:03d}'.format(
int(dt_h * l)))
if fname.find('_miniufo_f2') != -1:
dt_h = self.incre.astype('timedelta64[s]') / \
np.timedelta64(1, 'h')
fname = fname.replace('_miniufo_f2', '{0:02d}'.format(
int(dt_h * l)))
return fname
def _split_by_len(self, s, size):
"""
Split a string by a given size.
Parameters
----------
s : str
A given string.
size : int
A given size.
Returns
-------
re : list
A list contains the splitted strings.
"""
length = len(s)
return [s[i:i + size] for i in range(0, length, size)]
def _times_to_array(self, strTime, incre, tnum):
"""
Convert GrADS time string of start time and increment
to an array of numpy.datetime64.
Parameters
----------
strTime : str
Grads start time e.g., 00:00z01Jan2000.
incre : str
Grads time increment in str format e.g., 1dy.
tnum : int
Number of time steps.
Returns
----------
re : numpy array of datetime64
"""
if 'mo' in incre:
start = GrADStime_to_datetime(strTime)
lst = []
for l in range(tnum):
y, m = start.year, start.month
y, m = y+int((m+l-1)/12), int((m+l-1)%12)+1
lst.append(start.replace(year=y, month=m))
return np.asarray(lst, dtype='datetime64[s]')
elif 'yr' in incre:
start = GrADStime_to_datetime(strTime)
lst = []
for l in range(tnum):
y = start.year + l
lst.append(start.replace(year=y))
return np.asarray(lst, dtype='datetime64[s]')
else:
start = GrADStime_to_datetime64(strTime)
intv = GrADS_increment_to_timedelta64(incre)
return np.arange(start, start + intv * tnum, intv)
def __repr__(self):
"""
Print this class as a string.
"""
vdef = np.array(self.vdef)
pdef = self.pdef.proj if self.pdef is not None else ''
return \
' dsetPath: ' + str(self.dsetPath) + '\n'\
' descPath: ' + str(self.descPath) + '\n'\
' indxPath: ' + str(self.indxPath) + '\n'\
' stnmPath: ' + str(self.stnmPath) + '\n'\
' title: ' + str(self.title) + '\n'\
' undef: ' + str(self.undef) + '\n'\
' zrev: ' + str(self.zrev) + '\n'\
' yrev: ' + str(self.yrev) + '\n'\
' dtype: ' + str(self.dtype) + '\n'\
' template: ' + str(self.template) + '\n'\
' periodicX: ' + str(self.periodicX) + '\n'\
' cal365Days: ' + str(self.cal365Days)+ '\n'\
' sequential: ' + str(self.sequential)+ '\n'\
' byteOrder: ' + str(self.byteOrder) + '\n'\
' xdef: ' + str(self.xdef) + '\n'\
' ydef: ' + str(self.ydef) + '\n'\
' zdef: ' + str(self.zdef) + '\n'\
' tdef: ' + str(self.tdef) + '\n'\
' pdef: ' + str(pdef) + '\n'\
' vdef: ' + str(vdef)
class PDEF(object):
"""
PDEF class. Parse necessary info in PDEF.
Reference: http://cola.gmu.edu/grads/gadoc/pdef.html
"""
def __init__(self, oneline):
"""
Constructor.
Parameters
----------
oneline : str
The ASCII line of PDEF in ctl file.
"""
lineLower = oneline.lower()
if 'nps' in lineLower or 'sps' in lineLower:
token = lineLower.split()
if len(token) != 8:
raise Exception('not enough tokens for PDEF, ' +
'expected 8 but found ' + str(len(token)))
self.isize = int (token[1]) # size of native grid in x direction
self.jsize = int (token[2]) # size of native grid in y direction
self.proj = (token[3]) # type of projection
self.ipole = int (token[4]) # i-coord of pole ref to ll corner
self.jpole = int (token[5]) # j-coord of pole ref to ll corner
self.lonref = float(token[6]) # reference longitude
self.gridinc = float(token[7]) # distance between gridpoints in km
elif 'lccr' in lineLower or 'lcc' in lineLower:
token = lineLower.split()
if len(token) != 13:
raise Exception('not enough tokens for PDEF, ' +
'expected 13 but found ' + str(len(token)))
self.isize = int (token[1]) # size of native grid in x direction
self.jsize = int (token[2]) # size of native grid in y direction
self.proj = (token[3]) # type of projection
self.latref = float(token[4]) # ref latitude
self.lonref = float(token[5]) # ref longitude (E>0, W<0)
self.iref = float(token[6]) # i of ref point
self.jref = float(token[7]) # j of ref point
self.Struelat= float(token[8]) # S true lat
self.Ntruelat= float(token[9]) # N true lat
self.slon = float(token[10]) # standard longitude
self.dx = float(token[11]) # grid X increment in meters
self.dy = float(token[12]) # grid Y increment in meters
else:
raise Exception('not currently supported PDEF\n' + oneline)
def __str__(self):
"""
Print this class as a string.
"""
return '\n'.join(['%s: %s' % item for item in self.__dict__.items()])
class Coordinate(object):
"""
Discrete sampled coordinate. This is a simple wrapper
for np.array for a coordinate.
"""
def __init__(self, name, samples):
"""
Constructor.
Parameters
----------
name : str
The name of the coordinate.
samples : np.array
1D array for the discrete coordinate.
"""
self.isLinear = True
self.isIncre = True
self.name = name
self.samples = samples
self.delSamples = None
self.max = np.max(self.samples)
self.min = np.min(self.samples)
if len(samples) > 1:
self.delSamples = np.diff(self.samples)
if self.samples[-1] < self.samples[0]:
self.isIncre=False
else:
self.delSamples = np.array([1])
def length(self):
return len(self.samples)
def isPeriodic(self,period):
# not physically but generally true
if not self.isLinear: return False
delta = self.delSamples[0]
start = self.samples[-1] + delta - period
if abs((start - self.samples[0]) / delta) > 1e-4:
return False
return True
def __str__(self):
"""
Print this class as a string.
"""
return str(self.samples)
class CtlVar(object):
"""
A simple variable class used in .ctl file
"""
__reBlank = re.compile(r'[\s\t]+')
__reUnits = re.compile(r'\([^\(\)]+?\)')
def __init__(self, oneLineStr):
self.tcount = 0
self.zcount = 0
self.ycount = 0
self.xcount = 0
self.undef = np.nan
self.dependZ= True # whether the var depends on z
self.unit = ''
self.name = ''
self.comment= ''
self.index = 0
self.strPos = 0
self.name, self.zcount, self.storage, self.comment = \
CtlVar.__reBlank.split(oneLineStr.strip(), maxsplit=3)
self.zcount = int(self.zcount)
findMatch = CtlVar.__reUnits.findall(self.comment)
if findMatch:
self.unit = findMatch[-1]
self.comment = self.comment[:self.comment.index(self.unit)].strip()
else:
self.unit = ''
if self.zcount == 0:
self.zcount = 1
self.dependZ= False
def __str__(self):
"""
Print this class as a string.
"""
return '\n'.join(('%8s: %s' % item for item in self.__dict__.items()))
def __repr__(self):
"""
Print this class as a string.
"""
return 'CtlVar: {0:8s} in shape (t={1:d}, z={2:d}, y={3:d}, x={4:d})'\
.format(self.name,
self.tcount, self.zcount,
self.ycount, self.xcount)
"""
Some useful functions defined here
"""
def GrADStime_to_datetime(gradsTime):
"""
Convert GrADS time string e.g., 00:00z01Jan2000 to datetime
Parameters
----------
gradsTime : str
Grads time in str format e.g., 00:00z01Jan2000.
Returns
----------
re : datetime
"""
lens = len(gradsTime)
if lens==15 or lens==14:
time = datetime.strptime(gradsTime, "%H:%Mz%d%b%Y")
elif lens==12 or lens==11:
time = datetime.strptime(gradsTime, "%Hz%d%b%Y" )
elif lens== 9 or lens== 8:
time = datetime.strptime(gradsTime, "%d%b%Y" )
elif lens== 7:
time = datetime.strptime(gradsTime, "%b%Y" )
else:
raise Exception('invalid length of GrADS date/time string')
return time
def GrADStime_to_datetime64(gradsTime):
"""
Convert GrADS time string e.g., 00:00z01Jan2000 to numpy.datetime64
Parameters
----------
gradsTime : str
Grads time in str format e.g., 00:00z01Jan2000.
Returns
----------
re : datetime64
"""
time = GrADStime_to_datetime(gradsTime)
return datetime64(time.strftime('%Y-%m-%dT%H:%M:%S'))
def GrADS_increment_to_timedelta64(incre):
"""
Convert GrADS time increment string to numpy.timedelta64
Parameters
----------
incre : str
Grads time increment in str format e.g., 1dy.
Returns
----------
re : timedelta64
"""
unit = incre[-2:]
amount = incre[:-2]
unitDict = {
'se': 's',
'mn': 'm',
'hr': 'h',
'dy': 'D',
'mo': 'M',
'yr': 'Y'}
return timedelta64(int(amount), unitDict[unit])
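# Illustrative examples of the conversions above:
# GrADStime_to_datetime('00:00z01Jan2000') -> datetime(2000, 1, 1, 0, 0)
# GrADStime_to_datetime64('00:00z01Jan2000') -> numpy.datetime64('2000-01-01T00:00:00')
# GrADS_increment_to_timedelta64('1dy') -> numpy.timedelta64(1, 'D')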
"""
Helper (private) methods are defined below
"""
``` |
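A minimal usage sketch for `CtlDescriptor`; the ctl content below is an illustrative example, not taken from the repository:
```python
from xgrads.core import CtlDescriptor

content = """dset ^model.dat
undef -9.99e8
title demo
xdef 72 linear 0.0 5.0
ydef 37 linear -90.0 5.0
zdef 1 levels 1000
tdef 1 linear 00:00z01Jan2000 1dy
vars 1
t 1 99 temperature (K)
endvars"""

ctl = CtlDescriptor(content=content)
print(ctl)   # prints the parsed x/y/z/t definitions and the single variable 't'
```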
{
"source": "1271/hoper",
"score": 2
} |
#### File: hoper/hoper/_args.py
```python
from argparse import ArgumentParser, MetavarTypeHelpFormatter
from .meta import version
class Formatter(MetavarTypeHelpFormatter):
def _format_action_invocation(self, action):
if not action.option_strings:
default = self._get_default_metavar_for_positional(action)
metavar, = self._metavar_formatter(action, default)(1)
return metavar
else:
parts = []
if action.nargs == 0:
parts.extend(action.option_strings)
else:
default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
args_len = len(action.option_strings) - 1
for i, option_string in enumerate(action.option_strings):
if i == args_len:
parts.append('%s %s' % (option_string, args_string))
else:
parts.append(option_string)
return ', '.join(parts)
def get_cli_arguments() -> ArgumentParser: # pragma: no cover
args_parser = ArgumentParser(formatter_class=Formatter)
args_parser.add_argument('url', metavar='url', type=str, help='Analyzed url')
args_parser.add_argument('-u', '--user-agent', type=str, metavar='AGENT', help='User-agent',
default='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko)'
' Chrome/51.0.2704.103 Safari/537.36')
args_parser.add_argument('-c', '--cookies', metavar='C', type=str, nargs='*', default=[],
help='Cookies. Format: --cookies key1=value1 key2=value2')
args_parser.add_argument('-i', '--show-ip', action='store_true', help='Show ip for each hop')
args_parser.add_argument('-l', '--last-only', action='store_true', help='Show only last url (without history)')
args_parser.add_argument('-T', '--timeout', type=int, help='How long to wait for the server to send'
' data before giving up. In milliseconds (1/1000 sec)')
args_parser.add_argument('-t', '--show-request-time', action='store_true', help='Show request time for each hop')
args_parser.add_argument('-E', '--no-error-messages', action='store_true', help='Don\'t show error messages')
args_parser.add_argument('-S', '--no-statistic', action='store_true', help='Don\'t show statistic message')
# args_parser.add_argument('-p', '--post', action='store_true', help='Use post instead of get')
args_parser.add_argument('-C', '--count-only', action='store_true', help='Show count hops and exit')
args_parser.add_argument('-j', '--allow-js-redirects', action='store_true', dest='try_js',
help='Try detect js redirects')
args_parser.add_argument('--proxy', type=str, metavar='URL', nargs='*',
help='Proxy. Format: http://proxy:123 (for http and https) or http=http://proxy:123'
' https=http://secured-proxy:321 ftp=http://ftp-proxy:332')
args_parser.add_argument('-F', '--do-not-follow-loops', action='store_true', dest='disallow_loops',
help='If loop detected, stop operation')
args_parser.add_argument('-J', '--print-json', action='store_true', help='Print result as json')
args_parser.add_argument('--pretty-json', action='store_true',
help='Makes sense only if the --print-json argument is specified')
args_parser.add_argument('-v', '--version', action='version', help='Show version and exit', version=version)
args_parser.add_argument('-H', '--disallow-hooks', action='store_true', help='Disable special hooks for some sites')
return args_parser
arguments = get_cli_arguments()
__all__ = ['arguments']
```
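A short usage sketch of the parser built above (the URL and values are illustrative):
```python
args = arguments.parse_args(['https://example.com', '-i', '--timeout', '30'])
print(args.url, args.show_ip, args.timeout)   # https://example.com True 30
```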
#### File: hoper/hoper/_print.py
```python
from sys import stderr
from urllib.parse import urlparse
from .util.store import store
from .util.types import Hope
from .util.utils import host2ip
def err(*args):
if store().allow_error_messages:
print(*args, file=stderr)
def scheme2port(scheme: str) -> int:
if scheme == 'https' or scheme == '': # https as default
return 443
if scheme == 'http':
return 80
raise RuntimeError('Scheme not supported')
def print_item(item: Hope):
info = 'Hope:\t%s\nStatus:\t%i\n' % (item.url, item.status)
if store().args.try_js:
info += 'Js:\t%s\n' % ('True' if 'js' == item.type else 'False')
if store().args.show_request_time:
info += 'Time:\t%0.2f\n' % item.time
if store().args.show_ip:
r = urlparse(item.url)
ipv4, ipv6 = host2ip(r)
for ip in ipv4:
info += 'Ip4:\t%s\n' % ip
for ip in ipv6:
info += 'Ip6:\t%s\n' % ip
print(info, end='')
```
#### File: hoper/util/proxy_parser.py
```python
from re import compile
from sys import stderr
from typing import List, Dict, Optional
__all__ = ['parse_proxies']
RE = compile(r"""^(?:(?P<scheme>\w+)=)?(?P<url>\w+://.+)$""")
def parse_proxies(items: Optional[List]) -> Dict[str, str]:
proxies: Dict[str, str] = {}
if items is None:
return proxies
for i in items:
_parsed = RE.search(i)
if _parsed is None:
print('Error parsing %s' % i, file=stderr)
continue
parsed = _parsed.groupdict()
scheme = parsed.get('scheme', None)
url = parsed['url']
if scheme is None:
proxies.setdefault('http', url)
proxies.setdefault('https', url)
else:
proxies[scheme] = url
return proxies
```
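Illustrative behaviour of `parse_proxies` (the proxy URLs are made up):
```python
print(parse_proxies(['http://proxy:123']))
# -> {'http': 'http://proxy:123', 'https': 'http://proxy:123'}
print(parse_proxies(['https=http://secured-proxy:321']))
# -> {'https': 'http://secured-proxy:321'}
```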
#### File: hoper/util/utils.py
```python
from socket import getaddrinfo, AddressFamily
from sys import stderr
from typing import List, Tuple, Optional
from urllib.parse import urlparse, urlunparse, ParseResult
from requests import Response
from .store import store
default_scheme = 'http'
def err(*args):
if store().allow_error_messages:
print(*args, file=stderr)
def scheme2port(scheme: str) -> int:
if scheme == 'https' or scheme == '': # https as default
return 443
if scheme == 'http':
return 80
raise RuntimeError('Scheme not supported')
def host2ip(r: ParseResult) -> Tuple[List[str], List[str]]:
port = r.port or scheme2port(r.scheme)
info = getaddrinfo(r.hostname, port)
ipv4 = []
ipv6 = []
for ip in info:
_ip = ip[-1][0]
if ip[0] == AddressFamily.AF_INET and _ip not in ipv4:
ipv4.append(_ip)
elif ip[0] == AddressFamily.AF_INET6 and _ip not in ipv6:
ipv6.append(_ip)
return ipv4, ipv6
def normalize_url(url):
result = urlparse(url)
scheme = result.scheme
path = result.path
netloc = result.netloc
query = result.query
fragment = result.fragment
params = result.params
if '' == scheme:
err('Scheme is empty. Using default (%s)\n' % default_scheme)
scheme = default_scheme
if '' == netloc:
netloc = result.path
path = ''
return urlunparse((scheme, netloc, path, params, query, fragment))
def get_response_redirect_url(response: Response) -> Optional[str]:
if 300 <= response.status_code < 400:
return response.headers['location']
return None
def cookies2dict(cookies: List[str]):
_c = {}
for c in cookies:
key, value = c.split('=', 1)
_c[key] = value
return _c
``` |
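Two of the helpers above are easy to sanity-check in isolation (illustrative values):
```python
print(scheme2port('https'))                   # -> 443
print(cookies2dict(['sid=abc', 'lang=en']))   # -> {'sid': 'abc', 'lang': 'en'}
```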
{
"source": "127t6937/chainer-gan-lib",
"score": 2
} |
#### File: chainer-gan-lib/dcgan/net.py
```python
import os
import sys
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
sys.path.append(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.dirname(__file__)) + os.path.sep + os.path.pardir)
from common.net import add_noise
class Generator(chainer.Chain):
def __init__(self, n_hidden=128, bottom_width=4, ch=512, wscale=0.02):
super(Generator, self).__init__()
self.n_hidden = n_hidden
self.ch = ch
self.bottom_width = bottom_width
with self.init_scope():
w = chainer.initializers.Normal(wscale)
self.l0 = L.Linear(self.n_hidden, bottom_width * bottom_width * ch,
initialW=w)
self.dc1 = L.Deconvolution2D(ch, ch // 2, 4, 2, 1, initialW=w)
self.dc2 = L.Deconvolution2D(ch // 2, ch // 4, 4, 2, 1, initialW=w)
self.dc3 = L.Deconvolution2D(ch // 4, ch // 8, 4, 2, 1, initialW=w)
self.dc4 = L.Deconvolution2D(ch // 8, 3, 3, 1, 1, initialW=w)
self.bn0 = L.BatchNormalization(bottom_width * bottom_width * ch)
self.bn1 = L.BatchNormalization(ch // 2)
self.bn2 = L.BatchNormalization(ch // 4)
self.bn3 = L.BatchNormalization(ch // 8)
def make_hidden(self, batchsize):
return np.random.uniform(-1, 1, (batchsize, self.n_hidden, 1, 1)) \
.astype(np.float32)
def __call__(self, z):
h = F.reshape(F.relu(self.bn0(self.l0(z))),
(len(z), self.ch, self.bottom_width, self.bottom_width))
h = F.relu(self.bn1(self.dc1(h)))
h = F.relu(self.bn2(self.dc2(h)))
h = F.relu(self.bn3(self.dc3(h)))
x = F.tanh(self.dc4(h))
return x
class Discriminator(chainer.Chain):
def __init__(self, bottom_width=4, ch=512, wscale=0.02):
w = chainer.initializers.Normal(wscale)
super(Discriminator, self).__init__()
with self.init_scope():
self.c0_0 = L.Convolution2D(3, ch // 8, 3, 1, 1, initialW=w)
self.c0_1 = L.Convolution2D(ch // 8, ch // 4, 4, 2, 1, initialW=w)
self.c1_0 = L.Convolution2D(ch // 4, ch // 4, 3, 1, 1, initialW=w)
self.c1_1 = L.Convolution2D(ch // 4, ch // 2, 4, 2, 1, initialW=w)
self.c2_0 = L.Convolution2D(ch // 2, ch // 2, 3, 1, 1, initialW=w)
self.c2_1 = L.Convolution2D(ch // 2, ch // 1, 4, 2, 1, initialW=w)
self.c3_0 = L.Convolution2D(ch // 1, ch // 1, 3, 1, 1, initialW=w)
self.l4 = L.Linear(bottom_width * bottom_width * ch, 1, initialW=w)
self.bn0_1 = L.BatchNormalization(ch // 4, use_gamma=False)
self.bn1_0 = L.BatchNormalization(ch // 4, use_gamma=False)
self.bn1_1 = L.BatchNormalization(ch // 2, use_gamma=False)
self.bn2_0 = L.BatchNormalization(ch // 2, use_gamma=False)
self.bn2_1 = L.BatchNormalization(ch // 1, use_gamma=False)
self.bn3_0 = L.BatchNormalization(ch // 1, use_gamma=False)
def __call__(self, x):
h = F.leaky_relu(self.c0_0(x))
h = F.leaky_relu(self.bn0_1(self.c0_1(h)))
h = F.leaky_relu(self.bn1_0(self.c1_0(h)))
h = F.leaky_relu(self.bn1_1(self.c1_1(h)))
h = F.leaky_relu(self.bn2_0(self.c2_0(h)))
h = F.leaky_relu(self.bn2_1(self.c2_1(h)))
h = F.leaky_relu(self.bn3_0(self.c3_0(h)))
return self.l4(h)
```
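A quick shape check of the two networks above, assuming Chainer is installed (batch size and config are arbitrary):
```python
import chainer

gen = Generator()
dis = Discriminator()
z = gen.make_hidden(2)      # (2, 128, 1, 1) float32 noise
with chainer.using_config('train', True):
    x = gen(z)              # (2, 3, 32, 32): 4 -> 8 -> 16 -> 32 via the three deconvolutions
    y = dis(x)              # (2, 1) real/fake logits
print(x.shape, y.shape)
```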
#### File: chainer-gan-lib/dfm/net.py
```python
import chainer.functions as F
import chainer.links as L
import numpy as np
import chainer
class Discriminator(chainer.Chain):
def __init__(self, bottom_width=2, ch=512, wscale=0.02):
w = chainer.initializers.Normal(wscale)
super(Discriminator, self).__init__()
with self.init_scope():
self.c0 = L.Convolution2D(3, ch // 16, 3, 1, 1, initialW=w)
self.c1 = L.Convolution2D(ch // 16, ch // 8, 4, 2, 1, initialW=w)
self.c2 = L.Convolution2D(ch // 8, ch // 4, 4, 2, 1, initialW=w)
self.c3 = L.Convolution2D(ch // 4, ch // 2, 4, 2, 1, initialW=w)
self.c4 = L.Convolution2D(ch // 2, ch // 1, 4, 2, 1, initialW=w)
self.l5 = L.Linear(bottom_width * bottom_width * ch, 1, initialW=w)
self.bn1 = L.BatchNormalization(ch // 8)
self.bn2 = L.BatchNormalization(ch // 4)
self.bn3 = L.BatchNormalization(ch // 2)
self.bn4 = L.BatchNormalization(ch // 1, use_gamma=False)
def __call__(self, x):
h = x
h = F.leaky_relu(self.c0(h))
h = F.leaky_relu(self.bn1(self.c1(h)))
h = F.leaky_relu(self.bn2(self.c2(h)))
h = F.leaky_relu(self.bn3(self.c3(h)))
feature = self.bn4(self.c4(h))
h = F.leaky_relu(feature)
return feature, self.l5(h)
class Denoiser(chainer.Chain):
def __init__(self):
super(Denoiser, self).__init__()
with self.init_scope():
self.l0 = L.Linear(2048, 2048)
self.l1 = L.Linear(2048, 2048)
self.l2 = L.Linear(2048, 2048)
self.l3 = L.Linear(2048, 2048)
self.l4 = L.Linear(2048, 2048)
self.l5 = L.Linear(2048, 2048)
self.l6 = L.Linear(2048, 2048)
self.l7 = L.Linear(2048, 2048)
self.l8 = L.Linear(2048, 2048)
self.l9 = L.Linear(2048, 2048)
self.bn0 = L.BatchNormalization(2048)
self.bn1 = L.BatchNormalization(2048)
self.bn2 = L.BatchNormalization(2048)
self.bn3 = L.BatchNormalization(2048)
self.bn4 = L.BatchNormalization(2048)
self.bn5 = L.BatchNormalization(2048)
self.bn6 = L.BatchNormalization(2048)
self.bn7 = L.BatchNormalization(2048)
self.bn8 = L.BatchNormalization(2048)
def __call__(self, x):
h = F.reshape(x, (len(x), 2048))
h = F.leaky_relu(self.bn0(self.l0(h)))
h = F.leaky_relu(self.bn1(self.l1(h)))
h = F.leaky_relu(self.bn2(self.l2(h)))
h = F.leaky_relu(self.bn3(self.l3(h)))
h = F.leaky_relu(self.bn4(self.l4(h)))
h = F.leaky_relu(self.bn5(self.l5(h)))
h = F.leaky_relu(self.bn6(self.l6(h)))
h = F.leaky_relu(self.bn7(self.l7(h)))
h = F.leaky_relu(self.bn8(self.l8(h)))
return F.reshape(self.l9(h), (len(x), 512, 2, 2))
```
#### File: chainer-gan-lib/minibatch_discrimination/net.py
```python
import chainer.functions as F
import chainer.links as L
import numpy as np
import chainer
class Discriminator(chainer.Chain):
def __init__(self, bottom_width=4, ch=512, wscale=0.02, B=100, C=5):
w = chainer.initializers.Normal(wscale)
self.B = B
self.C = C
super(Discriminator, self).__init__()
with self.init_scope():
self.c0_0 = L.Convolution2D(3, ch // 8, 3, 1, 1, initialW=w)
self.c0_1 = L.Convolution2D(ch // 8, ch // 4, 4, 2, 1, initialW=w)
self.c1_0 = L.Convolution2D(ch // 4, ch // 4, 3, 1, 1, initialW=w)
self.c1_1 = L.Convolution2D(ch // 4, ch // 2, 4, 2, 1, initialW=w)
self.c2_0 = L.Convolution2D(ch // 2, ch // 2, 3, 1, 1, initialW=w)
self.c2_1 = L.Convolution2D(ch // 2, ch // 1, 4, 2, 1, initialW=w)
self.c3_0 = L.Convolution2D(ch // 1, ch // 1, 3, 1, 1, initialW=w)
self.md = L.Linear(bottom_width * bottom_width * ch, B * C, initialW=w)
self.l4 = L.Linear(bottom_width * bottom_width * ch + B, 1, initialW=w)
self.bn0_1 = L.BatchNormalization(ch // 4, use_gamma=False)
self.bn1_0 = L.BatchNormalization(ch // 4, use_gamma=False)
self.bn1_1 = L.BatchNormalization(ch // 2, use_gamma=False)
self.bn2_0 = L.BatchNormalization(ch // 2, use_gamma=False)
self.bn2_1 = L.BatchNormalization(ch // 1, use_gamma=False)
self.bn3_0 = L.BatchNormalization(ch // 1, use_gamma=False)
def __call__(self, x):
N = x.data.shape[0]
h = F.leaky_relu(self.c0_0(x))
h = F.leaky_relu(self.bn0_1(self.c0_1(h)))
h = F.leaky_relu(self.bn1_0(self.c1_0(h)))
h = F.leaky_relu(self.bn1_1(self.c1_1(h)))
h = F.leaky_relu(self.bn2_0(self.c2_0(h)))
h = F.leaky_relu(self.bn2_1(self.c2_1(h)))
feature = F.reshape(F.leaky_relu(self.c3_0(h)), (N, 8192))
m = F.reshape(self.md(feature), (N, self.B * self.C, 1))
m0 = F.broadcast_to(m, (N, self.B * self.C, N))
m1 = F.transpose(m0, (2, 1, 0))
d = F.absolute(F.reshape(m0 - m1, (N, self.B, self.C, N)))
d = F.sum(F.exp(-F.sum(d, axis=2)), axis=2) - 1
h = F.concat([feature, d])
return self.l4(h)
```
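The `md` block in `__call__` implements minibatch discrimination (Salimans et al., 2016): each sample is mapped to B rows of C-dimensional features, pairwise L1 distances are taken across the batch, and `exp(-distance)` is summed over the batch (minus 1 for the self-pair) to give B closeness features per sample. A numpy restatement with toy sizes (the shapes are the point, not the values):
```python
import numpy as np

N, B, C = 4, 3, 2                                 # toy batch size and feature sizes
m = np.random.randn(N, B, C)                      # per-sample minibatch features
d = np.abs(m[:, None] - m[None, :]).sum(axis=3)   # (N, N, B) pairwise L1 distances
o = np.exp(-d).sum(axis=1) - 1                    # (N, B); -1 removes each sample's self-pair
```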
#### File: chainer-gan-lib/progressive/updater.py
```python
import numpy as np
import math
import os, sys
import chainer
import chainer.functions as F
from chainer import Variable
sys.path.append(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.dirname(__file__)) + os.path.sep + os.path.pardir)
from common.misc import soft_copy_param
class Updater(chainer.training.StandardUpdater):
def __init__(self, *args, **kwargs):
self.gen, self.dis, self.gs = kwargs.pop('models')
self.n_dis = kwargs.pop('n_dis')
self.lam = kwargs.pop('lam')
self.gamma = kwargs.pop('gamma')
self.smoothing = kwargs.pop('smoothing')
self.stage_interval = kwargs.pop('stage_interval')
self.initial_stage = kwargs.pop('initial_stage')
self.counter = math.ceil(self.initial_stage * self.stage_interval)
super(Updater, self).__init__(*args, **kwargs)
def update_core(self):
gen_optimizer = self.get_optimizer('opt_gen')
dis_optimizer = self.get_optimizer('opt_dis')
xp = self.gen.xp
for i in range(self.n_dis):
batch = self.get_iterator('main').next()
batchsize = len(batch)
x = []
for j in range(batchsize):
x.append(np.asarray(batch[j]).astype("f"))
x_real = Variable(xp.asarray(x))
self.stage = self.counter / self.stage_interval
if math.floor(self.stage)%2==0:
reso = min(32, 4 * 2**(((math.floor(self.stage)+1)//2)))
scale = max(1, 32//reso)
if scale>1:
x_real = F.average_pooling_2d(x_real, scale, scale, 0)
else:
alpha = self.stage - math.floor(self.stage)
reso_low = min(32, 4 * 2**(((math.floor(self.stage))//2)))
reso_high = min(32, 4 * 2**(((math.floor(self.stage)+1)//2)))
scale_low = max(1, 32//reso_low)
scale_high = max(1, 32//reso_high)
if scale_low>1:
x_real_low = F.unpooling_2d(
F.average_pooling_2d(x_real, scale_low, scale_low, 0),
2, 2, 0, outsize=(reso_high, reso_high))
x_real_high = F.average_pooling_2d(x_real, scale_high, scale_high, 0)
x_real = (1-alpha)*x_real_low + alpha*x_real_high
y_real = self.dis(x_real, stage=self.stage)
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z, stage=self.stage)
y_fake = self.dis(x_fake, stage=self.stage)
x_fake.unchain_backward()
eps = xp.random.uniform(0, 1, size=batchsize).astype("f")[:, None, None, None]
x_mid = eps * x_real + (1.0 - eps) * x_fake
x_mid_v = Variable(x_mid.data)
y_mid = F.sum(self.dis(x_mid_v, stage=self.stage))
dydx, = chainer.grad([y_mid], [x_mid_v], enable_double_backprop=True)
dydx = F.sqrt(F.sum(dydx*dydx, axis=(1, 2, 3)))
loss_gp = self.lam * F.mean_squared_error(dydx, self.gamma * xp.ones_like(dydx.data)) * (1.0/self.gamma**2)
loss_dis = F.sum(-y_real) / batchsize
loss_dis += F.sum(y_fake) / batchsize
# drift-prevention term (keeps the critic output from drifting)
loss_dis += 0.001 * F.sum(y_real**2) / batchsize
loss_dis_total = loss_dis + loss_gp
self.dis.cleargrads()
loss_dis_total.backward()
dis_optimizer.update()
loss_dis_total.unchain_backward()
# train generator
z = Variable(xp.asarray(self.gen.make_hidden(batchsize)))
x_fake = self.gen(z, stage=self.stage)
y_fake = self.dis(x_fake, stage=self.stage)
loss_gen = F.sum(-y_fake) / batchsize
self.gen.cleargrads()
loss_gen.backward()
gen_optimizer.update()
# update smoothed generator
soft_copy_param(self.gs, self.gen, 1.0-self.smoothing)
chainer.reporter.report({'loss_dis': loss_dis})
chainer.reporter.report({'loss_gen': loss_gen})
chainer.reporter.report({'loss_gp': loss_gp})
chainer.reporter.report({'g': F.mean(dydx)})
chainer.reporter.report({'stage': self.stage})
self.counter += batchsize
``` |
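The penalty computed above is the WGAN-GP gradient penalty generalized with a target norm `gamma`, as used in the progressive-growing GAN paper; a shape-free restatement of the lines above (comments only, mirroring the updater's variables):
```python
# x_mid   = eps * x_real + (1 - eps) * x_fake        # random interpolates
# g       = || d y_mid / d x_mid ||_2                # per-sample gradient norm
# loss_gp = lam * mean((g - gamma)**2) / gamma**2    # reduces to standard WGAN-GP at gamma = 1
```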
{
"source": "12860/dlflow",
"score": 3
} |
#### File: dlflow/mgr/errors.py
```python
class BaseError(Exception):
def __init__(self, err_info=""):
super(BaseError, self).__init__(err_info)
self.err_info = err_info
def __str__(self):
return str(self.err_info)
class InstantiateNotAllowed(BaseError):
pass
class FileTypeNotSupport(BaseError):
pass
class ParameterError(BaseError):
pass
class RegisterKeyDuplicate(BaseError):
pass
class NotInitializeError(BaseError):
pass
class FmapNotExists(BaseError):
pass
class CircularReferences(BaseError):
pass
class ParserNotExists(BaseError):
pass
class FeatureEncodeError(BaseError):
pass
class FeatureNotBind(BaseError):
pass
```
#### File: models/internal/DNNBinaryClassifier.py
```python
from dlflow.mgr import model, config
from dlflow.models import ModelBase
import tensorflow as tf
class _Embedding(tf.keras.layers.Layer):
def __init__(self, input_dim, output_dim):
super(_Embedding, self).__init__()
self.input_dim = input_dim
self.output_dim = output_dim
def build(self, input_shape):
self.embedding = self.add_weight(name="emb_w",
shape=[self.input_dim,
self.output_dim],
initializer='uniform')
def call(self, inputs, **kwargs):
emb = tf.nn.embedding_lookup(self.embedding, inputs)
out_dim = inputs.shape[-1] * self.output_dim
return tf.reshape(emb, [-1, out_dim])
@model.reg("DNNBinaryClassifier")
class DNNBinaryClassifier(ModelBase):
cfg = config.setting(
config.req("MODEL.layers"),
config.opt("MODEL.learning_rate", 0.001),
config.opt("MODEL.batch_size", 128)
)
def __init__(self, fmap):
super(DNNBinaryClassifier, self).__init__(fmap)
self.optimizer = tf.keras.optimizers.Adam(
learning_rate=config.MODEL.learning_rate)
self.compute_loss = tf.keras.losses.BinaryCrossentropy(
from_logits=True)
self.mean_loss = tf.keras.metrics.Mean()
self.acc = tf.keras.metrics.BinaryAccuracy()
self.auc = tf.keras.metrics.AUC()
self.metrics = {
"mean_loss": self.mean_loss,
"acc": self.acc,
"auc": self.auc
}
def build(self):
concat_list = self.get_inputs(tp="nums")
for ctg_inp, depth in self.get_inputs(tp="ctgs", with_depth=True):
_emb = _Embedding(depth, 6)(ctg_inp)
concat_list.append(_emb)
net = tf.concat(concat_list, axis=1)
for size in config.MODEL.layers:
net = tf.keras.layers.Dense(size, activation=tf.nn.relu)(net)
logits = tf.keras.layers.Dense(1)(net)
sigmoid = tf.nn.sigmoid(logits)
self.set_output(logits, "logits")
self.set_output(sigmoid, "sigmoid")
@tf.function
def train(self, feature, label):
_label = label["label"]
with tf.GradientTape() as tape:
logits, sigmoid = self.model(feature)
loss = self.compute_loss(_label, logits)
grads = tape.gradient(loss, self.model.trainable_variables)
self.optimizer.apply_gradients(
zip(grads, self.model.trainable_variables))
self.mean_loss(loss)
self.acc(_label, sigmoid)
self.auc(_label, sigmoid)
@tf.function
def evaluate(self, feature, label):
_label = label["label"]
logits, sigmoid = self.model(feature)
loss = self.compute_loss(_label, logits)
self.mean_loss(loss)
self.acc(_label, sigmoid)
self.auc(_label, sigmoid)
@tf.function
def predict(self, feature):
pred = self.model(feature)
return pred
```
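A minimal check of the `_Embedding` layer defined above (toy sizes, random ids):
```python
import numpy as np
import tensorflow as tf

emb = _Embedding(input_dim=10, output_dim=6)                # vocabulary of 10 ids, 6-dim vectors
ids = tf.constant(np.random.randint(0, 10, size=(2, 3)))   # batch of 2, 3 ids each
out = emb(ids)
print(out.shape)   # (2, 18): the 3 looked-up 6-dim vectors are flattened per sample
```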
#### File: models/internal/NNDenseInput.py
```python
from dlflow.mgr import model, config
from dlflow.models import InputBase
from dlflow.features import PRESET_BUCKETS
from collections import OrderedDict
import tensorflow as tf
@model.reg("NNDenseInput")
class NNDenseInput(InputBase):
cfg = config.setting(
config.opt("MODEL.epochs", None),
config.opt("MODEL.batch_size", 1),
config.opt("MODEL.parallel", 4),
config.opt("MODEL.shuffle_size", None),
config.opt("MODEL.drop_remainder", False),
config.opt("MODEL.buffer_size", None)
)
def __init__(self, fmap):
super(NNDenseInput, self).__init__(fmap)
def tfr_inputs(self, files):
feature_dict = OrderedDict()
for fe in self.fmap.primary_keys.get_features():
feature_dict[fe.name] = self._TF_FEATURE[fe.fetype]([1])
for fe in self.fmap.labels.get_features():
feature_dict[fe.name] = self._TF_FEATURE[fe.fetype]([1])
buckets = self.fmap.get_buckets(drop=PRESET_BUCKETS)
for bucket in buckets:
nums_size = bucket.nums.fe_size
ctgs_size = bucket.ctgs.fe_count
if nums_size > 0:
name = "_".join([bucket.name, "nums"])
feature_dict[name] = self._float_feature([nums_size])
if ctgs_size > 0:
name = "_".join([bucket.name, "ctgs"])
feature_dict[name] = self._int_feature([ctgs_size])
def _parse_single_example(example):
feature = tf.io.parse_single_example(example, feature_dict)
return feature
parallel = config.MODEL.parallel
dataset = tf.data \
.TFRecordDataset(filenames=files,
buffer_size=config.MODEL.buffer_size,
num_parallel_reads=parallel) \
.map(map_func=_parse_single_example,
num_parallel_calls=parallel) \
.batch(batch_size=config.MODEL.batch_size,
drop_remainder=config.MODEL.drop_remainder) \
.repeat(count=config.MODEL.epochs)
if config.MODEL.shuffle_size:
dataset = dataset.shuffle(config.MODEL.shuffle_size)
return dataset
def rdd_inputs(self, rdd, batch_size):
primary_keys = []
features = []
out_dtype = []
out_shape = []
for fe in self.fmap.primary_keys.get_features():
primary_keys.append(fe.name)
out_dtype.append(self._TF_TYPE[fe.fetype])
out_shape.append(tf.TensorShape([fe.size]))
buckets = self.fmap.get_buckets(drop=PRESET_BUCKETS)
for bucket in buckets:
nums_size = bucket.nums.fe_size
ctgs_size = bucket.ctgs.fe_count
if nums_size > 0:
name = "_".join([bucket.name, "nums"])
features.append(name)
out_dtype.append(tf.float32)
out_shape.append(tf.TensorShape(nums_size))
if ctgs_size > 0:
name = "_".join([bucket.name, "ctgs"])
features.append(name)
out_dtype.append(tf.int64)
out_shape.append(tf.TensorShape(ctgs_size))
def rdd_generator():
for row in rdd:
row_data = []
for k in primary_keys:
row_data.append([row[k]])
for k in features:
row_data.append(list(row[k]))
yield tuple(row_data)
dataset = tf.data.Dataset \
.from_generator(generator=rdd_generator,
output_shapes=tuple(out_shape),
output_types=tuple(out_dtype)) \
.batch(batch_size, drop_remainder=False)
return dataset
```
#### File: dlflow/models/model_base.py
```python
from dlflow.features import PRESET_BUCKETS, PKEY_NAME, LABEL_NAME
from pathlib import Path
from collections import OrderedDict
from absl import logging
import tensorflow as tf
import numpy as np
import abc
class ModelBase(metaclass=abc.ABCMeta):
def __init__(self, fmap):
super(ModelBase, self).__init__()
self.metrics = {}
self.msg_frac = 100
self._model = None
self.pkey_names = []
self.label_names = []
self.feature_names = []
self.output_names = []
self.inputs = OrderedDict()
self.outputs = OrderedDict()
self._build(fmap)
@property
def model(self):
return self._model
@abc.abstractmethod
def build(self, *args, **kwargs):
pass
def train(self, feature, label):
pass
def predict(self, feature):
pass
def evaluate(self, feature, label):
pass
def train_act(self, dataset):
step = 0
for data in dataset:
step += 1
_, label, feature = self.unpack_data(data)
self.train(feature, label)
if step % self.msg_frac == 0:
self.show_metrics(step)
self.show_metrics(step)
def predict_act(self, dataset):
len_pkey = len(self.pkey_names)
len_output = len(self.output_names)
pred_pkeys = [[] for _ in range(len_pkey)]
pred_outputs = [[] for _ in range(len_output)]
for _data in dataset:
data = list(_data)
pkey = data[:len_pkey]
feature = data[len_pkey:]
if len(feature) == 1:
feature = feature[0]
outputs = self.predict(feature)
if isinstance(outputs, tf.Tensor):
outputs = [outputs]
for l, k in zip(pred_pkeys, pkey):
if k.shape[-1] == 1:
k_np = k.numpy().squeeze(axis=len(k.shape) - 1)
else:
k_np = k.numpy()
l.append(k_np)
for l, p in zip(pred_outputs, outputs):
if p.shape[-1] == 1:
p_np = p.numpy().squeeze(axis=len(p.shape) - 1)
else:
p_np = p.numpy()
l.append(p_np)
np_list = []
for i in pred_pkeys:
_keys = np.concatenate(i, axis=0).tolist()
if isinstance(_keys[0], bytes):
_keys = [e.decode("utf-8") for e in _keys]
np_list.append(_keys)
for i in pred_outputs:
np_list.append(np.concatenate(i, axis=0).tolist())
res = []
for item in zip(*np_list):
res.append([i for i in item])
return res
def evaluate_act(self, dataset):
step = 0
for data in dataset:
step += 1
_, label, feature = self.unpack_data(data)
self.evaluate(feature, label)
if step % self.msg_frac == 0:
self.show_metrics(step)
self.show_metrics(step)
def _build(self, fmap):
self.build_inputs(fmap)
inputs = self.get_inputs()
self.build()
outputs = self.get_outputs()
self._model = tf.keras.Model(inputs=inputs, outputs=outputs)
def save(self, save_dir):
save_dir = Path(save_dir)
h5dir = save_dir.joinpath("h5weights")
if not h5dir.is_dir():
h5dir.mkdir(parents=True)
self._model.save(save_dir.as_posix())
self._model.save_weights(h5dir.joinpath("weights.h5").as_posix())
def load_model(self, load_dir):
load_dir = Path(load_dir)
self._model = tf.keras.models.load_model(load_dir.as_posix())
def load_weights(self, load_dir):
weight_path = Path(load_dir)
if weight_path.suffix == ".h5":
self._model.load_weights(weight_path.as_posix())
else:
self._model.load_weights(
weight_path.joinpath("h5weights", "weights.h5").as_posix())
def unpack_data(self, data):
pkey = [data[n] for n in self.pkey_names]
label = {n: data[n] for n in self.label_names}
feature = [data[n] for n in self.feature_names]
return pkey, label, feature
def build_inputs(self, fmap):
for fe in fmap[PKEY_NAME].get_features():
self.pkey_names.append(fe.name)
for fe in fmap[LABEL_NAME].get_features():
self.label_names.append(fe.name)
buckets = fmap.get_buckets(drop=PRESET_BUCKETS)
for bucket in buckets:
nums_size = bucket.nums.fe_size
ctgs_size = bucket.ctgs.fe_count
self.inputs[bucket.name] = OrderedDict()
if nums_size > 0:
num_name = "_".join([bucket.name, "nums"])
num_input = tf.keras.Input(shape=nums_size,
dtype=tf.float32,
name=num_name)
self.feature_names.append(num_name)
info = {
"input": num_input,
"depth": nums_size
}
self.inputs[bucket.name]["nums"] = info
if ctgs_size > 0:
ctgs_name = "_".join([bucket.name, "ctgs"])
ctg_input = tf.keras.Input(shape=ctgs_size,
dtype=tf.int64,
name=ctgs_name)
emb_depth = bucket.ctgs.fe_size
self.feature_names.append(ctgs_name)
info = {
"input": ctg_input,
"depth": emb_depth
}
self.inputs[bucket.name]["ctgs"] = info
def get_inputs(self, tp=None, with_depth=False):
inputs = []
if tp is None:
for field_inputs in self.inputs.values():
for _field in field_inputs.values():
if with_depth:
_item = (_field["input"], _field["depth"])
else:
_item = _field["input"]
inputs.append(_item)
else:
for field_inputs in self.inputs.values():
if tp not in field_inputs:
continue
_field = field_inputs[tp]
if with_depth:
_item = (_field["input"], _field["depth"])
else:
_item = _field["input"]
inputs.append(_item)
return inputs
def get_outputs(self):
outputs = []
for _, output in self.outputs.items():
outputs.append(output)
if len(outputs) == 1:
outputs = [outputs]
return outputs
def show_metrics(self, step):
msg = "Step: {}".format(step)
for name, metric in self.metrics.items():
msg += ", {}: {:>.4f}".format(name, metric.result())
logging.info(msg)
def set_output(self, output, name):
self.outputs[name] = output
self.output_names.append(name)
```
#### File: dlflow/utils/logger.py
```python
import os
import logging
import absl.logging as absl_logging
_ABSL_LOGGING_LEVEL = {
"debug": absl_logging.DEBUG,
"info": absl_logging.INFO,
"warn": absl_logging.WARNING,
"error": absl_logging.ERROR
}
DEFAULT_INFO_FMT = "%(asctime)s [%(levelname)s] - %(message)s"
DEFAULT_DEBUG_FMT = "%(asctime)s %(filename)s:%(lineno)d " \
"[%(levelname)s] - %(message)s"
def logging_initialize(log_level=None, fmt_str=None):
absl_handler = absl_logging.get_absl_handler()
if log_level is None:
log_level = "info"
if fmt_str is None:
fmt_str = DEFAULT_DEBUG_FMT
formatter = logging.Formatter(fmt_str)
absl_handler.setFormatter(formatter)
set_logging_level(log_level)
def set_logging_level(log_level):
if log_level.lower() not in _ABSL_LOGGING_LEVEL:
raise ValueError("Logging initialize error. Can not recognize value of"
" <log_level> which by given '{}' , except 'debug',"
" 'info', 'warn', 'error'.".format(log_level))
absl_handler = absl_logging.get_absl_handler()
if absl_handler in logging.root.handlers:
logging.root.removeHandler(absl_handler)
absl_logging.set_verbosity(_ABSL_LOGGING_LEVEL[log_level])
absl_logging.set_stderrthreshold(_ABSL_LOGGING_LEVEL[log_level])
absl_logging._warn_preinit_stderr = False
logging.root.addHandler(absl_handler)
def set_logging_writer(log_name, log_dir):
absl_handler = absl_logging.get_absl_handler()
if not os.path.exists(log_dir):
os.makedirs(log_dir)
absl_handler.use_absl_log_file(program_name=log_name, log_dir=log_dir)
def get_logging_info_str():
info = ["\n=== === LOGGING INFO === ==="]
for _name, _logger in logging.Logger.manager.loggerDict.items():
if hasattr(_logger, "handlers"):
_handler = _logger.handlers
else:
_handler = "None"
_str = " * {} : {} - {}".format(_name, _logger, _handler)
info.append(_str)
info.append("=== === LOGGING INFO === ===\n")
return "\n".join(info)
```
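A minimal usage sketch for the logging helpers above; the log directory and program name are illustrative.

```python
# Hypothetical usage of dlflow's logging helpers.
from dlflow.utils.logger import (
    logging_initialize, set_logging_writer, get_logging_info_str)
from absl import logging

logging_initialize(log_level="debug")        # install the absl handler
set_logging_writer("dlflow", "/tmp/dlflow")  # also log to /tmp/dlflow (illustrative path)
logging.info("logger initialized")
print(get_logging_info_str())                # dump registered loggers/handlers
```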
#### File: dmflow/example/run_spark.py
```python
from __future__ import print_function
import os
import sys
import logging
import subprocess
import argparse
import pprint
SPARK_PREFIX = "dmflow"
JAR_NAME = "dmflow-1.0.0-SNAPSHOT.jar"
DEBUG = False
PROD = False
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s [%(levelname)s] - %(message)s',
datefmt='%Y-%m-%d %a %H:%M:%S',
stream=sys.stdout)
class Config:
def __init__(self):
self.MaxExecutors = 300
self.parallelism = 1000
self.cores = 2
self.APP_PREFIX = SPARK_PREFIX
self.YARN_QUEUE = "root.my_yarn_queue"
def __repr__(self):
return """MLflow Runner Config (
------------- Project Config ------------
YARN_QUEUE: {0}
------------- Spark Runtime Config ------------
MaxExecutors: {1}
parallelism: {2}
cores: {3}
)
""".format(self.YARN_QUEUE,
self.MaxExecutors,
self.parallelism,
self.cores)
class HDFS:
@staticmethod
def exists(hdfs_path):
cmd = "hadoop fs -test -e " + hdfs_path
logging.debug("run shell: " + cmd)
ret = subprocess.call(cmd, shell=True)
return True if ret == 0 else False
@staticmethod
def copyTolocal(hdfs_path):
cmd = "hadoop fs -get {0} .".format(hdfs_path)
logging.debug("run shell: " + cmd)
subprocess.call(cmd, shell=True)
class ExecWrapper:
cmd = 'echo "replace cmd as your command"'
def run(self):
cmd = self.cmd
logging.debug(r'exec cmd: %s' % cmd)
p = subprocess.Popen(cmd,
shell=True,
bufsize=1,
universal_newlines=True)
p.wait()
sys.stdout.flush()
code = p.returncode
if code != 0:
raise RuntimeError(
"subprocess run shell failed! ret=" + str(code))
class ShowSparkVersion(ExecWrapper):
def __init__(self):
self.cmd = "spark-submit --version"
class FeatureFlowRunner(ExecWrapper):
TEMPLATE = '''
spark-submit --queue {YARN_QUEUE} \
--class com.didi.dm.dmflow.FeatureFlowRunner \
--name "{appName}" \
--driver-memory 2g \
--executor-memory 12g \
--conf "spark.dynamicAllocation.enabled=true" \
--conf "spark.driver.maxResultSize=1g" \
--conf "spark.dynamicAllocation.minExecutors=100" \
--conf "spark.dynamicAllocation.maxExecutors={MaxExecutors}" \
--conf "spark.yarn.executor.memoryOverhead=3g" \
--conf "spark.sql.shuffle.partitions={parallelism}" \
--conf "spark.default.parallelism={parallelism}" \
--conf "spark.executor.cores={cores}" \
--driver-java-options "-Dlog4j.configuration=file:log4j.properties" \
{JAR_NAME} \
--seedSQL "{seedSQL}" \
--featureConfig "{featureConfig}" \
--featureDate "{featureDate}" \
--featureModelHDFS "{featureModelHDFS}" \
--featureOutHDFS "{featureOutHDFS}" {fit_args}
'''
def __init__(self,
seedSQL,
featureConfig,
featureDate,
featureModelHDFS,
featureOutHDFS,
fit):
fit_args = "--fit false"
if fit is True:
            assert os.path.exists(featureConfig), \
                "featureConfig does not exist when fit=true: " \
                + featureConfig
fit_args = "--fit true"
def isNullOrEmpty(seedSQL):
            assert isinstance(seedSQL, str), "input parameter is not a string"
            return not seedSQL or len(seedSQL.strip()) == 0
params = {
"YARN_QUEUE": config.YARN_QUEUE,
"appName": ".".join([SPARK_PREFIX, featureDate]),
"JAR_NAME": JAR_NAME,
"MaxExecutors": config.MaxExecutors,
"parallelism": config.parallelism,
"cores": config.cores,
"seedSQL": seedSQL,
"featureConfig": featureConfig,
"featureDate": featureDate,
"featureModelHDFS": featureModelHDFS,
"featureOutHDFS": featureOutHDFS,
"fit_args": fit_args
}
self.cmd = self.TEMPLATE.replace("\n", " ").format(**params)
if __name__ == '__main__':
print("[RUN]", ' '.join(sys.argv))
parser = argparse.ArgumentParser(description='MLflow Binary Runner')
parser.add_argument('-f',
'--conf',
                        help='path to the feature merge config file',
required=True)
parser.add_argument('-s',
'--sql',
                        help='SQL string selecting the label population',
default='')
parser.add_argument('-d',
'--date',
                        help='feature date to run',
default='')
parser.add_argument('-m',
'--featureModelHDFS',
                        help='HDFS path where the feature model is saved',
default='')
parser.add_argument("-o",
'--featureOutHDFS',
                        help='HDFS output path for the merged features',
default='')
parser.add_argument('--fit',
                        help='fit a model from the config? if not set, the existing model is loaded directly',
action='store_true')
parser.add_argument("-v",
"--verbosity",
help="increase output verbosity",
action="store_true")
opt = parser.parse_args()
logging.debug("输入的CLI参数如下:")
pprint.pprint(vars(opt))
config = Config()
logging.debug(config)
FeatureFlowRunner(
seedSQL=opt.sql,
featureConfig=opt.conf,
featureDate=opt.date,
featureModelHDFS=opt.featureModelHDFS,
featureOutHDFS=opt.featureOutHDFS,
fit=opt.fit
).run()
```
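For context, a sketch of driving `FeatureFlowRunner` directly instead of through argparse; every path, date, and SQL string below is illustrative.

```python
# Hypothetical direct invocation of the spark-submit wrapper above.
# FeatureFlowRunner reads the module-level `config`, so set it first.
config = Config()
FeatureFlowRunner(
    seedSQL="SELECT uid FROM labels WHERE dt = '2020-06-01'",  # illustrative
    featureConfig="feature_flow.yml",   # must exist locally when fit=True
    featureDate="2020-06-01",
    featureModelHDFS="hdfs://ns/path/to/model",
    featureOutHDFS="hdfs://ns/path/to/output",
    fit=True
).run()
```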
#### File: mnist_example/model/mnist_classifier.py
```python
from dlflow.mgr import model, config
from dlflow.models import ModelBase
import tensorflow as tf
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, Conv2D
from tensorflow.keras.layers import ReLU, Dropout, Flatten
from tensorflow.keras.layers import BatchNormalization, MaxPooling2D
@model.reg("mnist_cnn")
class MyModel(ModelBase):
cfg = config.setting(
config.req("MODEL.learning_rate"),
config.req("MODEL.classes"),
config.req("MODEL.layers"),
config.opt("MODEL.batch_size", 8)
)
def __init__(self, fmap):
super(MyModel, self).__init__(fmap)
        self.optimizer = tf.keras.optimizers.Adam(
            learning_rate=config.MODEL.learning_rate)
self.compute_loss = tf.keras.losses.SparseCategoricalCrossentropy()
self.mean_loss = tf.keras.metrics.Mean()
self.acc = tf.keras.metrics.SparseCategoricalAccuracy()
self.metrics = {
"mean_loss": self.mean_loss,
"acc": self.acc
}
self.msg_frac = 10
def build(self):
concat_list = self.get_inputs(tp="nums")
images = tf.concat(concat_list, axis=1)
images = tf.reshape(images, (-1, 28, 28, 1))
        output = CNN(n_class=config.MODEL.classes)(images)
arg_max = tf.argmax(output, axis=1)
self.set_output(output, "softmax")
self.set_output(arg_max, "argmax")
@tf.function
def train(self, feature, label):
_label = label["label"]
with tf.GradientTape() as tape:
output, _ = self.model(feature)
loss = self.compute_loss(_label, output)
grads = tape.gradient(loss, self.model.trainable_variables)
self.optimizer.apply_gradients(
zip(grads, self.model.trainable_variables))
self.mean_loss(loss)
self.acc(_label, output)
@tf.function
def evaluate(self, feature, label):
_label = label["label"]
output, _ = self.model(feature)
loss = self.compute_loss(_label, output)
self.mean_loss(loss)
self.acc(_label, output)
@tf.function
def predict(self, feature):
pred = self.model(feature)
return pred
class CNN(Model):
def __init__(self, n_class=10):
super(CNN, self).__init__()
self.conv1 = Conv2D(32, (3, 3), activation='relu')
self.conv2 = Conv2D(64, (3, 3), activation='relu')
self.max_pooing2d = MaxPooling2D((2, 2))
self.flatten = Flatten()
self.dense1 = Dense(64, activation='relu')
self.dense2 = Dense(n_class, activation='softmax')
def call(self, inputs):
x = self.conv1(inputs)
x = self.max_pooing2d(x)
x = self.conv2(x)
x = self.max_pooing2d(x)
x = self.flatten(x)
x = self.dense1(x)
x = self.dense2(x)
return x
```
#### File: dlflow/static_task/DemoTask.py
```python
from dlflow.tasks import TaskNode
from dlflow.mgr import task, config
from absl import logging
@task.reg("model register name")
class DemoTask(TaskNode):
parent_tag = TaskNode.set_tag("PARENT_TAG")
output_tag = TaskNode.set_tag("OUTPUT_TAG")
bind_tasks = "task name or list of tasks"
cfg = config.setting(
config.req("DemoParam")
)
def __init__(self):
super(DemoTask, self).__init__()
@TaskNode.timeit
def run(self):
logging.info("Running {}".format(self.__class__.__name__))
``` |
{
"source": "128technology/blaster",
"score": 2
} |
#### File: webapp/blaster/menu.py
```python
from flask import (
Blueprint, flash, g, redirect, render_template, request, url_for
)
bp = Blueprint('menu', __name__)
@bp.route('/')
def home():
return render_template('menu.html')
```
#### File: webapp/blaster/quickstart.py
```python
import functools
from flask import (
current_app, Blueprint, flash, Flask, g, redirect, render_template, request, session, url_for, jsonify
)
import json
from blaster.db import get_db
from . import constants
bp = Blueprint('quickstart', __name__, url_prefix='/quickstart')
@bp.route('/<instance>')
def instantiate(instance=None):
db = get_db()
node_row = db.execute('SELECT quickstart_id from node WHERE identifier = ?', (instance,)).fetchone()
    if node_row is None or node_row[0] is None:
        qs_row = db.execute('SELECT node_name, asset_id, config FROM quickstart WHERE default_quickstart > 0').fetchone()
    else:
        qs_row = db.execute('SELECT node_name, asset_id, config FROM quickstart WHERE id = ?', (node_row['quickstart_id'],)).fetchone()
if qs_row is None:
return jsonify(error="Could not find a specific or default quickstart"), 404
response = {}
quickstart = {
'a': qs_row['asset_id'],
'n': qs_row['node_name'],
'c': qs_row['config']
}
response['quickstart'] = json.dumps(quickstart)
response['password'] = <PASSWORD>
db.execute('UPDATE node SET status = ? WHERE identifier = ?', ('Bootstrapped', instance))
db.commit()
return jsonify(response)
``` |
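A quick way to exercise this endpoint, assuming the usual Flask application-factory layout; the `create_app` factory name and the node identifier are assumptions.

```python
# Hypothetical smoke test of the /quickstart/<instance> endpoint.
from blaster import create_app  # assumed app factory

app = create_app()
with app.test_client() as client:
    resp = client.get('/quickstart/node-001')  # illustrative identifier
    print(resp.status_code, resp.get_json())
```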
{
"source": "128technology/rules_pkg",
"score": 2
} |
#### File: pkg/releasing/print_rel_notes.py
```python
import sys
from string import Template
import textwrap
from releasing import release_tools
def print_notes(repo, version, tarball_path, org='bazelbuild'):
file_name = release_tools.package_basename(repo, version)
sha256 = release_tools.get_package_sha256(tarball_path)
url = 'https://github.com/%s/%s/releases/download/%s/%s' % (
org, repo, version, file_name)
workspace_stanza = release_tools.workspace_content(url, repo, sha256)
relnotes_template = Template(textwrap.dedent(
"""
------------------------ snip ----------------------------
**New Features**
**Incompatible Changes**
**WORKSPACE setup**
```
${workspace_stanza}
```
**Using the rules**
See [the source](https://github.com/${org}/${repo}/tree/master).
------------------------ snip ----------------------------
""").strip())
print(relnotes_template.substitute({
'org': org,
'repo': repo,
'workspace_stanza': workspace_stanza,
}))
def main(args):
print_notes(repo=args[1], version=args[2], tarball_path=args[3])
if __name__ == '__main__':
main(sys.argv)
``` |
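A usage sketch for the script above; the repo, version, and tarball path are illustrative, and the import path assumes the `releasing` package layout implied by the file header.

```python
# Hypothetical invocation, equivalent to:
#   python print_rel_notes.py rules_pkg 0.2.0 dist/rules_pkg-0.2.0.tar.gz
from releasing import print_rel_notes

print_rel_notes.print_notes(
    repo='rules_pkg',
    version='0.2.0',
    tarball_path='dist/rules_pkg-0.2.0.tar.gz')
```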
{
"source": "128technology/stage_check",
"score": 2
} |
#### File: stage_check/stage_check/AbstractTest.py
```python
import importlib
import re
import os
import sys
import getpass
import datetime
import shlex
import subprocess
import pprint
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import EntryTest
except ImportError:
import EntryTest
try:
from stage_check import Linux
except ImportError:
import Linux
class Base(object):
"""
Note that individual tests are unaware of their node context. It is up
to the invoker to understand that some tests run on the primary node
and others on the secondary, while still others run on both.
"""
def __init__(self, test_id, config, args):
self._debug = args.debug
self.desc_string = ''
self._test_id = str(test_id)
self._schema = {}
self.message_list = [ ]
self.results = {}
self.output = None
self._version = "0.0.0"
# These are set after creation to give the test context
# about what has transpired so far (helpful for progressive
# json output)
self.__last_test_id = 0
self.__router_index = 0
self.__last_router_index = 0
try:
self.desc_string = config["Description"]
except KeyError:
self.desc_string = "NO DESC PROVIDED"
self.base_name = 'AbstractTest'
self.module_name = config["TestModule"]
self.output_suffix = config["OutputModule"]
try:
self.__enabled = config["Enable"]
except KeyError:
self.__enabled = True
try:
self.params = config["Parameters"]
except Exception as e:
print(f"***********************************")
print(f"* Test: {type(self).__name__}")
print(f"* Error validating test parameters ")
print(f"* EXITING...")
print(f"***********************************")
print(e)
sys.exit(1)
def requires_graphql(self):
return False
def requires_netconf(self):
return False
def resource_greedy(self):
return False
def description(self):
return f'{self.test_id:>2} {self.desc_string}'
def desc(self):
return self.desc_string
@property
def enabled(self):
return self.__enabled
@property
def debug(self):
return self._debug
@property
def test_id(self):
return self._test_id
@property
def schema(self):
return self._schema
@property
def version(self):
return self._version
@property
def last_test_id(self):
return self.__last_test_id
@last_test_id.setter
def last_test_id(self, last_test_id):
self.__last_test_id = last_test_id
@property
def router_index(self):
return self.__router_index
@router_index.setter
def router_index(self, router_index):
self.__router_index = router_index
@property
def last_router_index(self):
return self.__last_router_index
    @last_router_index.setter
    def last_router_index(self, last_router_index):
        self.__last_router_index = last_router_index
def apply_default_params(self, default_params):
params = self.params.copy()
for param in default_params:
if not param in params:
params[param] = default_params[param]
if self.debug:
print(f'------ {self.description()} ------')
pprint.pprint(params)
return params
def create_output_instance(self, module_dict=None):
"""
Load configured output module
"""
self.output = None
output_name_base = "Output" + self.module_name
output_module_name = output_name_base + self.output_suffix
if module_dict is None or \
output_module_name not in module_dict:
output_module = importlib.import_module(output_module_name)
if hasattr(output_module, output_module_name):
if module_dict is not None:
module_dict[output_module_name] = output_module
# print(f"Successfully registered output plugin '{output_module_name}'")
else:
print(f"Module '{output_module_name}' has no class; skipping")
if module_dict is not None:
output_module = module_dict[output_module_name]
output = output_module.create_instance()
required_base_name = output_name_base + '.Base'
if output.full_name != required_base_name:
print(f"Output Module '{output_module_name}' must have parent '{required_base_name}'")
print(f"Parent is {output.full_name}")
sys.exit(1)
self.output = output
def run(self):
"""
Abstract test template
"""
return -1
def exclude_flat_entry(self, entry, excludes):
"""
        Compare a flattened list entry (with its special node_type and
        node_name fields) against a list of exclusion dictionaries.
        If the entry matches, it should be excluded from the test
        performed on the list.
        Note that a missing key in the entry will cause the exclusion
        rule to fail, and thus the test will still be performed...
        (consider just throwing an exception)
"""
if entry is None:
return True
entry_excluded=False
for exclude in excludes:
excluded=True
for exclude_key in exclude:
if not exclude_key in entry or \
entry[exclude_key] != exclude[exclude_key]:
excluded=False
break
if self.debug:
pprint.pprint(exclude)
print(f'Exclude: {excluded}')
if excluded:
entry_excluded=True
break
return entry_excluded
    def eval_tests_flat(self, flattened_json, entry_tests):
        """
        Process test entries on the fly using the EntryTest parser.
        """
        parser = EntryTest.Parser(debug=self.debug)
        for entry in flattened_json:
            parser.eval_entry_by_tests(entry, entry_tests)
            if self.output is not None:
                self.output.proc_test_match(entry)
def eval_tests(self, node, test_list, default_result):
"""
Evaluate a series of tests against the provided node
A node should be an entry in the list of json
objects for testing (e.g.network interfaces, device interfaces etc.)
"""
if self.debug:
print(f'------- start eval_tests ---------')
pprint.pprint(node)
result = default_result
entry_text_result='?'
for entry in test_list:
            if self.debug:
                entry_str = pprint.pformat(entry)
                print(f'------- start eval_tests ---------')
                print(f'{entry_str}')
            match = True
            for key in entry:
if key == "status":
continue
match = self.test_key_value(node, key, entry[key])
if match is None or \
match is False:
break
if self.debug:
print(f'-------Result={result}--------------')
if match is not None and \
match is True and \
"status" in entry:
entry_text_result = entry["status"]
result = Output.text_to_status(entry_text_result)
break
if self.debug:
print(f'*******************************')
print(f' eval_tests: FINAL RESULT {entry_text_result}({result})')
print(f'*******************************')
return result
def test_key_value(self, node, key, value):
"""
        Look up a (possibly dot-separated) key in the node dictionary,
        descending into nested dictionaries, and compare the result to value.
"""
try:
key_list = key.split(".")
for subkey in key_list:
if self.debug:
nodestr = pprint.pformat(node)
print (f'test_key_value: {nodestr} -> subkey={subkey}')
node = node[subkey]
# Consider not catching this here to make it
# more obvious to the caller...
except (KeyError, TypeError) as e:
return None
if self.debug:
print (f'test_key_value: {key} == {value}?')
return (node == value)
def test_info(self, local_info, router_context):
info={}
try:
dt = datetime.datetime.now()
info["DateTime"] = dt
#info["node_type"] = router_context.node_type()
info["StageCheckVersion"] = "1.0.0"
info["TestModule"] = self.__class__.__name__
info["TestVersion"] = self.version
info["TestDescription"] = self.desc()
info["TestIndex"] = int(self.test_id)
info["TestIndexLast"] = self.last_test_id
info["RouterIndex"] = self.router_index
info["RouterIndexLast"] = self.last_router_index
info["InvokingRouter"] = local_info.get_router_name()
info["InvokingNode"] = local_info.get_node_name()
info["InvokingRole"] = local_info.get_node_type()
info["Router"] = router_context.get_router()
"""
info["Node"] = router-context.get_node_by_type(node_type)
info["NodeType"] = node_type
info["Asset"] = router-context.get_asset_by_type(node_type)
info["SWVersion"] = router_context.get_version_by_type(node_type)
"""
except Exception as e:
print(e)
pass
# pprint.pprint(info)
return info
def test_end_by_status(self, status_list = []):
"""
        Perform whatever action occurs at test_end(), but
        only if self.status matches an entry in the passed list
        @status_list -- List of statuses to match
"""
self.output.test_end_by_status(status_list)
class GraphQL(Base):
"""
Base class for GraphQL tests
"""
class ReplyStatus(object):
"""
Simplified Server / API status...
"""
def __init__(self):
self._server_code = 200
self._error_list = []
@property
def error_list(self):
return self._error_list
@error_list.setter
def error_list(self, value):
if isinstance(value, list):
self._error_list.extend(value)
@property
def server_code(self):
return self._server_code
@server_code.setter
def server_code(self, value):
self._server_code = value
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
def process_gql_status(self, query_status, errors=None):
error_list=[]
if errors is not None:
for entry in errors:
if "message" in entry and \
not entry["message"] in error_list:
error_list.append(entry["message"])
if query_status is None or \
query_status > 299 or \
len(error_list) > 0:
self.output.graphql_error(query_status, error_list)
return False
return True
def send_query(self, gql_node, gql_token, json_reply, status=None):
errors=[]
query_status = gql_node.send_query(gql_token, json_reply, errors)
if status is not None:
status.server_code = query_status
status.error_list = errors
return self.process_gql_status(query_status, errors)
def _workaround_graphql_api(self, intf_list):
"""
Works around a problem / bug with the graphql API not always returning state
        information for an L2 HA network-interface.
"""
statemap = {}
statekey=''
for entry in intf_list:
try:
sharedPhysAddr = entry['router/nodes/deviceInterfaces/sharedPhysAddress']
if sharedPhysAddr == '':
continue
statekey = sharedPhysAddr + ':' + entry['name']
except (KeyError, TypeError):
continue
if 'state' in entry and \
entry['state'] is not None:
statemap[statekey] = entry['state']
for entry in intf_list:
try:
sharedPhysAddr = entry['router/nodes/deviceInterfaces/sharedPhysAddress']
if sharedPhysAddr == '':
continue
statekey = sharedPhysAddr + ':' + entry['name']
if 'state' not in entry or \
entry['state'] is None:
entry['state'] = statemap[statekey]
except (KeyError, TypeError):
continue
class Linux(Base, Linux.Callbacks):
"""
Base Class for tests using linux commands
"""
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
        try:
            self.__timeout = config["Timeout"]
        except KeyError:
            self.__timeout = None
    @property
    def timeout(self):
        return self.__timeout
def run_linux_progress(self, message):
self.output.progress_display(message, self.fp)
def line_to_message(self, line, regex=None, message_format='No Format Provided'):
"""
If provided, The format is applied to matching groups from regex as
applied to line.
"""
output_string = message_format
if regex is not None:
matches = re.search(regex, line, re.MULTILINE|re.DOTALL)
if matches is not None:
index = 1
while index < 10:
try:
dest = '{' + str(index) + '}'
source = matches.group(index)
output_string = output_string.replace(dest, source)
except IndexError:
pass
index += 1
return output_string
def convert_to_json(self, text_list, pattern, regex_groups, json_data):
"""
Derived classes can override this with their own conversion routines
See TestT1Detail
"""
json_data.clear()
return json_data
def run_linux_json(self,
local_info,
router_context,
node_type,
command,
patterns,
json_data,
error_lines,
fp):
"""
"""
# run the command...
candidate_lines = []
shell_status = self.run_linux(local_info, router_context, node_type, command,
None, candidate_lines, error_lines, fp)
# convert output to json...
if len(error_lines) == 0:
self.convert_to_json(candidate_lines, patterns, json_data)
return shell_status
def run_linux(self, local_info, router_context, node_type, command,
candidate_regex, candidate_lines, error_lines, fp):
"""
"""
salt_error_indicators = []
        salt_error_indicators.append(r'^\s*Minion did not return.')
        salt_error_indicators.append(r'^\s*No minions matched the target.')
asset_id = router_context.get_asset_by_type(node_type)
if asset_id is None or asset_id == '':
error_lines.append(f"Missing asset info for {node_type} node")
return 0
candidate_lines.clear()
linux_command = command
if local_info.get_node_type() == 'conductor':
linux_command = 't128-salt ' + asset_id + " cmd.run '" + command + "'"
if self.debug:
if candidate_regex is not None:
print(f'Candidate Regex: {candidate_regex}')
print(f'Linux Command: {linux_command}')
print(f'Asset ID: {asset_id}')
# subprocess.DEVNULL is python 3.3+ only
self.output.progress_display(f"Run '{linux_command}'...", fp=fp)
pipe = subprocess.Popen(shlex.split(linux_command),
stdin=open(os.devnull, 'rb'),
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
close_fds=True)
for line in iter(pipe.stdout.readline, b''):
bline = line.rstrip()
xline = bline.decode('utf-8')
if self.debug:
print(f'Consider: {xline}')
if candidate_regex is not None and \
candidate_regex != '' and \
not re.search(candidate_regex, xline):
continue
candidate_lines.append(xline)
if self.debug:
print(f'*Matched: {xline}')
for indicator in salt_error_indicators:
if re.search(indicator, xline):
error_lines.append(xline.lstrip())
if self.debug:
pprint.pprint(candidate_lines)
return_code = pipe.poll()
return return_code
def check_user(self, required_user):
message = None
status = Output.Status.OK
user = getpass.getuser()
if user != required_user:
status=Output.Status.WARN
self.output.run_as_wrong_user(user, status=status)
return status
```
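To make the plugin contract concrete, a minimal sketch of a test derived from `AbstractTest.Base`; the `run()` body and default parameters are assumptions based on the constructor and helpers above, not taken from a real stage_check test.

```python
# Hypothetical minimal stage_check test plugin.
try:
    from stage_check import AbstractTest
except ImportError:
    import AbstractTest

try:
    from stage_check import Output
except ImportError:
    import Output

class TestExample(AbstractTest.Base):
    """
    Illustrative test; real tests override run() with the framework's
    actual calling convention, which is not shown in this file.
    """
    def __init__(self, test_id, config, args):
        super().__init__(test_id, config, args)

    def run(self):
        params = self.apply_default_params({"threshold": 1})  # illustrative default
        # ... perform checks, then report via self.output ...
        return Output.Status.OK
```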
#### File: stage_check/stage_check/Banner.py
```python
class MissingOverload(Exception):
"""
Raised when the base class is method is not overloaded...
"""
pass
class Base(object):
    """
    Module to create Text Banners. Abstracted out because these have
    no place when creating json output (in this case the only test banner
    might be a comma).
    """
    def __init__(self):
        self.place_holder = True
def header_summary(
self,
router_context,
tests_by_status
):
"""
Output summary of Router information
"""
raise MissingOverload
def header(
self,
router_context,
router_count,
router_max,
fp=None
):
"""
Invoked before a router's test is started to output
        interesting information about that Router
"""
raise MissingOverload
def trailer_summary(
self,
router_context
):
"""
Output summary of Router information
"""
raise MissingOverload
def trailer(
self,
router_context,
router_count,
router_max,
fp=None
):
"""
        Invoked after a router's tests complete to output
        interesting information about that Router
"""
raise MissingOverload
```
#### File: stage_check/stage_check/EntryTest.py
```python
import re
import pprint
import sly
try:
from stage_check import Output
except ImportError:
import Output
class LexerError(Exception):
"""
Raised when Lexer token extraction error occurs
w/o debug enabled...
"""
pass
class MissingOverload(Exception):
"""
Raised when MatchFunction is not overloaded by a
derived class
"""
class MatchFunction(object):
"""
Derived classes will have this method called when
"""
def process_match(entry, test_index, test, defaults):
raise MissingOverload
class TestLexer(sly.Lexer):
# Set of token names. This is always required
tokens = { INT, FLOAT, KEY, STRING, BOOL, KEY_TEST,
TYPE2STR,
PLUS, MINUS, MULT, DIVIDE,
EQ, LT, LE, GT, GE, NE,
OR, AND,
LPAREN, RPAREN }
literals = { '(', ')', '\'', '@' }
# String containing ignored characters
ignore = ' \t'
# Regular expression rules for tokens
PLUS = r'\+'
MINUS = r'-'
MULT = r'\*'
DIVIDE = r'/'
EQ = r'=='
LE = r'<='
LT = r'<'
GE = r'>='
GT = r'>'
NE = r'!='
OR = r'\|\|'
AND = r'&&'
LPAREN = r'\('
RPAREN = r'\)'
TYPE2STR = r'@'
# Identifiers and keywords
FLOAT = r'[0-9]+\.[0-9]+'
INT = r'[0-9]+'
KEY = r'[a-zA-Z0-9]+([\.\/_][a-zA-Z0-9]+)*'
KEY_TEST = r'\?[a-zA-Z0-9]+([\.\/_][a-zA-Z0-9]+)*'
STRING = r'\'[^\']*\''
KEY['True'] = BOOL
KEY['False'] = BOOL
ignore_comment = r'\#.*'
# Line number tracking
@_(r'\n+')
def ignore_newline(self, t):
self.lineno += t.value.count('\n')
def error(self, t):
if self.debug:
print('Line %d: Bad character %r' % (self.lineno, t.value[0]))
else:
raise LexerError
self.index += 1
@property
def debug(self):
return self.__debug
@debug.setter
def debug(self, value):
self.__debug = value
class TestParser(sly.Parser):
"""
Consider adding support for unary operators:
- negativity operator
! logical negation
? dictionary key existence
Additional binary:
=~ regex match
"""
tokens = TestLexer.tokens
@property
def debug(self):
return self.__debug
@debug.setter
def debug(self, debug):
if isinstance(debug, bool):
self.__debug = debug
@property
def json_entry(self):
return self.__json_entry
@json_entry.setter
def json_entry(self, json_entry):
if isinstance(json_entry, dict):
self.__json_entry = json_entry
    def infer_type(self, non_string, string):
        # bool must be checked before int: isinstance(True, int) is True,
        # and bool('False') would be truthy
        if isinstance(non_string, bool):
            return string == 'True'
        elif isinstance(non_string, int):
            return int(string)
        elif isinstance(non_string, float):
            return float(string)
        return None
def infer_types(self, left, right):
#print(f"INFER(I) L={left}({type(left)} R={right}({type(right)}")
if not isinstance(left, str) and \
isinstance(right, str):
right = self.infer_type(left, right)
if not isinstance(right, str) and \
isinstance(left, str):
left = self.infer_type(right, left)
#print(f"INFER(O) L={left}({type(left)} R={right}({type(right)}")
return left, right
@_('expr_or OR expr_and')
def expr_or(self, p):
if self.debug:
print(f"{p[0]} || {p[2]}: {p[0] or p[2]}")
return p[0] or p[2]
@_('expr_and')
def expr_or(self, p):
return p.expr_and
@_('expr_and AND expr_comp')
def expr_and(self,p):
if self.debug:
print(f"{p[0]} && {p[2]}: {p[0] and p[2]}")
return p[0] and p[2]
@_('expr_comp')
def expr_and(self, p):
return p.expr_comp
@_('expr_comp EQ expr_add')
def expr_comp(self, p):
if self.debug:
print(f"{p[0]} == {p[2]}: {p[0] == p[2]}")
return p[0] == p[2]
@_('expr_comp NE expr_add')
def expr_comp(self, p):
if self.debug:
print(f"{p[0]} != {p[2]}: {p[0] != p[2]}")
return p[0] != p[2]
@_('expr_comp GT expr_add')
def expr_comp(self, p):
left, right = self.infer_types(p[0], p[2])
if self.debug:
print(f"GT: {left} > {right}: {left > right}")
return left > right
@_('expr_comp LT expr_add')
def expr_comp(self, p):
left, right = self.infer_types(p[0], p[2])
if self.debug:
print(f"LT: {left} < {right}: {left < right}")
return left < right
@_('expr_comp LE expr_add')
def expr_comp(self, p):
left, right = self.infer_types(p[0], p[2])
if self.debug:
print(f"LE: {left} <= {right}: {left <= right}")
return left <= right
@_('expr_comp GE expr_add')
def expr_comp(self, p):
left, right = self.infer_types(p[0], p[2])
if self.debug:
print(f"GE: {left} >= {right}: {left >= right}")
return left >= right
@_('expr_add')
def expr_comp(self, p):
return p.expr_add
@_('expr_add PLUS expr_mult')
def expr_add(self, p):
if self.debug:
print(f"{p[0]} + {p[2]}: {p[0] + p[2]}")
return p[0] + p[2]
@_('expr_add MINUS expr_mult')
def expr_add(self, p):
if self.debug:
print(f"{p[0]} - {p[2]}: {p[0] - p[2]}")
return p[0] - p[2]
@_('expr_mult')
def expr_add(self, p):
return p.expr_mult
@_('expr_mult MULT expr')
def expr_mult(self, p):
if self.debug:
print(f"{p[0]} * {p[2]}: {p[0] * p[2]}")
return p[0] * p[2]
@_('expr_mult DIVIDE expr')
def expr_mult(self, p):
if self.debug:
print(f"{p[0]} / {p[2]}: {p[0] / p[2]}")
return p[0] / p[2]
@_('expr')
def expr_mult(self, p):
return p.expr
@_('LPAREN expr_or RPAREN')
def expr(self, p):
if self.debug:
print(f"({p.expr_or})")
return p.expr_or
@_('TYPE2STR expr')
def expr(self, p):
try:
type_name = p[1].__class__.__name__
if type_name == 'NoneType':
type_name = 'None'
except NameError:
type_name = 'Undefined'
if self.debug:
print(f"@{p[1]}: {type_name}")
return type_name
@_('term')
def expr(self, p):
return p.term
@_('INT')
def term(self, p):
return int(p[0])
@_('FLOAT')
def term(self, p):
return float(p[0])
@_('KEY')
def term(self, p):
keys = p[0].split('.')
value = self.__json_entry
for k in keys:
value = value[k]
if self.debug:
print(f"entry[{p[0]}] = {value}")
return value
@_('KEY_TEST')
def term(self, p):
"""
Treating this as a token is a bit of a hack, but it works...
"""
key = p[0]
key = key[1:]
keys = key.split('.')
value = self.__json_entry
for k in keys:
if not k in value:
if self.debug:
print(f"?entry[{key}] = False")
return False
value = value[k]
if self.debug:
print(f"?entry[{key}] = True")
return True
@_('STRING')
def term(self, p):
return p[0][1:-1]
@_('BOOL')
def term(self, p):
return p[0] == 'True'
@property
def json_entry(self):
return self.__json_entry
@json_entry.setter
def json_entry(self, value):
self.__json_entry = value
@property
def debug(self):
return self.__debug
@debug.setter
def debug(self, value):
self.__debug = value
class Parser(object):
def __init__(self, debug=False):
self.__lexer = TestLexer()
self.__parser = TestParser()
self.__lexer.debug = debug
self.__parser.debug = debug
self.__debug = debug
@property
def lexer(self):
return self.__lexer
@property
def parser(self):
return self.__parser
@property
def debug(self):
return self.__debug
def true_value(self, value):
"""
Convert a value to boolean using explicit rules
"""
bool_result = False
if isinstance(value, bool):
if value == True:
bool_result = True
elif isinstance(value, int):
if value != 0:
bool_result = True
elif isinstance(value, float):
if value != 0.0:
bool_result = True
elif isinstance(value, str):
if value != '':
bool_result = True
return bool_result
def exclude_entry(self, entry, exclude_tests):
"""
Evaluate entry against list of exclude tests. Returns True
if it matches, False if it does not.
*entry* Dictionary derived from flattened graphql json reply,
Linux command etc.
*exclude_tests* Dictionary of tests to run against this entry
"""
self.parser.json_entry = entry
matched = False
rule_number=0
for item in exclude_tests:
try:
tokens = self.lexer.tokenize(item)
result = self.parser.parse(tokens)
except Exception as e:
entry["exclude_rule"] = item
entry["exclude_exception"] = f"rule {rule_number}: {e.__class__.__name__} exception '{e}'"
break
matched = self.true_value(result)
if matched:
if self.debug:
print(f"exclude_entry[{rule_number}]: Matched {item}\n")
matched = True
break
else:
if self.debug:
print(f"exclude_entry[{rule_number}]: No Match {item}\n")
rule_number = rule_number + 1
return matched
def eval_entry_by_test(self, entry, test_index, test_entry, defaults):
"""
        *entry*       Entry from a json reply list, Linux command etc.
        *test_index*  Index of this test in the test list
        *test_entry*  Test (dictionary) to run against this entry
        *defaults*    Default values used if not present in test_entry
Returns None if:
(1) entry did not match the test
(2) entry matched but no status could be found
"""
fname = "eval_entry_by_test"
return_status = None
if self.debug:
print(f"{fname}[#{test_index}]: check entry against {test_entry['test']}")
try:
tokens = self.lexer.tokenize(test_entry["test"])
result = self.parser.parse(tokens)
if self.debug:
print(f"{fname}[#{test_index}]: parse({test_entry['test']}) -> RESULT:{result}")
except Exception as e:
return_status = Output.Status.FAIL
entry["test_status"] = return_status
entry["test_matched"] = test_entry
entry["test_index"] = test_index
entry["test_exception"] = f"Rule #{test_index}: {e.__class__.__name__} exception '{e}'"
if self.debug:
print(f"{fname}[#{test_index}]: EXCEPTION {e.__class__.__name__} {e}")
return return_status
matched = self.true_value(result)
if matched:
if "status" in test_entry:
status = test_entry["status"]
elif "status" in defaults:
status = defaults["status"]
else:
return None
format_string = None
if "format" in test_entry:
format_string = test_entry["format"]
elif "format" in defaults:
format_string = defaults["format"]
return_status = Output.text_to_status(status)
if self.debug:
print(f"{fname}[#{test_index}]: {status} matched -> "
f"{Output.text_to_status(status)}({status})")
entry["test_status"] = return_status
entry["test_matched"] = test_entry
entry["test_index"] = test_index
entry["test_format"] = format_string
entry["test_exception"] = None
return return_status
def eval_entry_by_tests(self, entry, entry_tests):
"""
        *entry*        Entry from a json reply list, Linux command etc.
        *entry_tests*  Dictionary of tests to run against this entry
"""
no_match = { "status" : None }
defaults = {}
tests = entry_tests["tests"]
if "no_match" in entry_tests:
no_match = entry_tests["no_match"]
if "defaults" in entry_tests:
defaults = entry_tests["defaults"]
self.parser.json_entry = entry
if self.debug:
print("------- eval_entry_by_tests: the entry --------")
pprint.pprint(self.parser.json_entry)
print("------- eval_entry_by_tests: the tests --------")
pprint.pprint(entry_tests)
print("-------------------------------------------------")
return_status = None
test_index = 0
for test in tests:
current_status = self.eval_entry_by_test(entry, test_index, test, defaults)
if current_status is not None:
return_status = current_status
break
test_index += 1
if return_status is None:
return_status = Output.text_to_status(no_match["status"])
entry["test_status"] = return_status
entry["test_matched"] = None
entry["test_exception"] = None
if "format" in no_match:
entry["test_format"] = no_match["format"]
if self.debug:
print(f"test_entry[#N/A]: return_status None -> "
f"{Output.status_to_text(return_status)} ({return_status})")
return return_status
def eval_tests_by_entry(self, entry, entry_tests, func_object):
"""
        *entry*        Entry from a json reply list, Linux command etc.
        *entry_tests*  Dictionary of tests to run against this entry
        *func_object*  Object whose process_match() is invoked on each match
"""
defaults = {}
tests = entry_tests["tests"]
if "defaults" in entry_tests:
defaults = entry_tests["defaults"]
self.parser.json_entry = entry
if self.debug:
print("------- eval_entry_by_tests: the entry --------")
pprint.pprint(self.parser.json_entry)
print("------- eval_entry_by_tests: the tests --------")
pprint.pprint(entry_tests)
print("-------------------------------------------------")
test_index = 0
for test in tests:
return_status = None
current_status = self.eval_entry_by_test(entry, test_index, test, defaults)
if current_status is not None:
func_object.process_match(entry, test_index, test, defaults)
test_index += 1
```
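To illustrate the expression DSL implemented by `TestLexer`/`TestParser`, a small sketch of evaluating entry tests against one flattened entry; the keys, statuses, and format strings are illustrative.

```python
# Hypothetical use of the EntryTest expression parser.
try:
    from stage_check import EntryTest
except ImportError:
    import EntryTest

entry = {"name": "wan0", "state": "OPER_UP", "stats": {"errors": 0}}
entry_tests = {
    "defaults": {"status": "FAIL"},
    "no_match": {"status": "WARN", "format": "no rule matched"},
    "tests": [
        # KEY tokens use '.' to walk nested dictionaries;
        # '?key' tests for key existence.
        {"test": "?state && state == 'OPER_UP' && stats.errors == 0",
         "status": "OK"},
    ],
}

parser = EntryTest.Parser(debug=False)
status = parser.eval_entry_by_tests(entry, entry_tests)
print(status, entry["test_status"], entry["test_index"])
```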
#### File: stage_check/stage_check/gql_helper.py
```python
import os
import sys
import requests
import json
import pprint
import getpass
from requests.packages.urllib3.exceptions import InsecureRequestWarning
GRAPHQL_API_HOST = '127.0.0.1'
GRAPHQL_API_URL = '/api/v1/graphql'
GRAPHQL_LOGIN_URL = '/api/v1/login'
GRAPHQL_LOCAL_PORT = 31516
def pretty_print_POST(req):
"""
    At this point the request is completely built and ready
    to be fired; it is "prepared".
    Note that the formatting used in this function is meant for
    pretty printing and may differ from the actual request.
"""
print('{}\n{}\n{}\n\n{}'.format(
'-----------START-----------',
req.method + ' ' + req.url,
'\n'.join('{}: {}'.format(k, v) for k, v in req.headers.items()),
req.body,
))
print('------------END------------\n')
class RestToken:
"""
REST API Token manager.
The API token is stored at $HOME/.graphql/token
"""
def __init__(self, doLogin=True, gql_path=None):
if gql_path is None:
home_path = os.path.expanduser("~")
self.token_file_path = os.path.join(home_path, '.graphql', 'token')
else:
self.token_file_path = os.path.join(gql_path, 'token')
try:
with open(self.token_file_path, 'r') as file:
data = file.read().replace('\n', '')
self.token = data
        except (IOError, FileNotFoundError):
            self.token = ''
            self.init_token(gql_path=gql_path)
def init_token(self, username='', password='', gql_path=None):
"""
"""
if username == '':
username = getpass.getuser()
if username == 'root':
username = 'admin'
if password == '':
print(f'Please enter password for user {username} to generate a token.')
print('You should only have to do this once')
password = getpass.getpass()
if password == '':
return False
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
api_url = 'https://%s%s' % (GRAPHQL_API_HOST, GRAPHQL_LOGIN_URL)
headers = {'Content-Type': 'application/json'}
credentials = {"username": username, "password": password}
response = requests.post(api_url, headers=headers, data=json.dumps(credentials), verify=False)
json_response = response.json()
try:
self.token = json_response['token']
except KeyError:
pprint.pprint(json_response)
print(f'Failed to obtain token for user "{username}"')
sys.exit(1)
if gql_path is None:
token_path = os.path.join(os.environ['HOME'], '.graphql')
else:
token_path = gql_path
token_file_path = os.path.join(token_path, 'token')
if not os.path.exists(token_path):
os.mkdir(token_path)
with open(token_file_path, "w") as token_file:
token_file.write(self.token)
return True
def get_token(self):
return self.token
class RawGQL:
"""
"""
def __init__(self, raw_string, debug=False):
self.raw_string=raw_string
self._debug = debug
@property
def debug(self):
return self._debug
def get_top_level(self):
return True
def format(self):
return self.raw_string
def build_query(self):
return { "query" : '{' + self.format() + '}' }
def format_results(self, json_dict):
return json_dict['data']
def send_query(self, rt, json_reply, errors=None):
if self.get_top_level() is False:
return
        # Don't validate the server's cert
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
url = 'https://' + GRAPHQL_API_HOST
if rt is None:
url = url + f":{GRAPHQL_LOCAL_PORT}"
url = url + GRAPHQL_API_URL
headers = {}
if rt is not None:
headers['Authorization'] = 'Bearer %s' % rt.get_token()
headers['Content-Type'] = 'application/json'
files = {}
data = {}
#json_body={ "query": self.build_query() }
json_body = self.build_query()
files['json'] = (None, json.dumps(json_body), 'application/json')
if self.debug:
req = requests.Request('POST', url, data = json.dumps(json_body), headers = headers)
prepared = req.prepare()
pretty_print_POST(prepared)
try:
response = requests.post(url, data = json.dumps(json_body), headers = headers, verify = False)
if self.debug:
print('................START RAW REPLY ......................')
pprint.pprint(response.status_code)
print('......................................................')
pprint.pprint(response.content)
print('......................................................')
pprint.pprint(response.json())
print('.................END RAW REPLY .......................')
except (ConnectionRefusedError,
requests.packages.urllib3.exceptions.NewConnectionError,
requests.exceptions.ConnectionError) as e:
print(f'+----------------------------------------------------------------------------+')
print(f'| Unable to communicate with GraphQL:')
print(f'| {url}')
print(f'| {e.__class__.__name__}')
print(f'| {e}')
print(f'+----------------------------------------------------------------------------+')
sys.exit(1)
server_status = response.status_code
if server_status == 200:
resp_as_json = response.json()
try:
if 'errors' in resp_as_json and \
errors is not None:
errors.clear()
errors.extend(resp_as_json["errors"])
json_reply.update(self.format_results(resp_as_json))
except (KeyError, IndexError) as e:
json_reply.clear()
if self.debug:
if server_status == 200:
print('========= json reply =========')
pprint.pprint(json_reply)
if errors is not None:
print('========= errors ==========')
pprint.pprint(errors)
else:
print(f'========= server status: {server_status} =========')
return server_status
def flatten_json(self, node, stop_prefix, seperator='/', prefix='router', depth=0):
output_list = []
fields_noop = {}
def unedgify(edgy_node):
"""
            Replaces nested ['edges'] (a list) / ['node'] (a dict) wrappers
            with a plain list of dictionaries.
            Converts:
                {'edges': [{'node': {'key-1': 'value-1'}},
                           {'node': {'key-1': 'value-1'}}]}
            To:
                [{'key-1': 'value-1'},
                 {'key-1': 'value-1'}]
"""
if isinstance(edgy_node, dict):
if 'edges' in edgy_node:
edgy_node = edgy_node['edges']
index = 0
while index < len(edgy_node):
if 'node' in edgy_node[index]:
edgy_node[index] = edgy_node[index]['node']
edgy_node[index] = unedgify(edgy_node[index])
index = index + 1
else:
for key in edgy_node:
edgy_node[key] = unedgify(edgy_node[key])
return edgy_node
def _flatten_json(node, stop_prefix, seperator='.', prefix='router', depth=0):
"""
            Flatten a json reply, starting at the root, down to stop_prefix,
            skipping 'edges' and 'node' keys. Each key is prefixed with the
            levels traversed (levels joined by the seperator parameter).
            Returns:
              node_list:  A list of nodes at level stop_prefix built up
                          during recursion
              field_dict: A dictionary of fields to be applied to node_list
                          during tail-end recursion (should be empty at finish)
            Note:
              Currently this will not work for 'unusual' graphql where 'node'
              means a 128T node name and not a graphql node. Likewise lists not
              in the form { 'edges' : [ entry1, entry2, ... ] } can cause
              problems
"""
node_list = []
field_dict = {}
skip_list = [ 'edges', 'node' ]
node_type = type(node)
if node_type == list:
for entry in node:
sub_list, sub_fields = _flatten_json(entry, stop_prefix, seperator, prefix, depth)
node_list = node_list + sub_list
field_dict.update(sub_fields)
elif node_type == dict:
for key in node:
prefstr = prefix
if not key in skip_list:
prefstr = prefix + seperator + key
sub_list, sub_fields = _flatten_json(node[key], stop_prefix, seperator, prefstr, depth + 1)
if prefstr == stop_prefix:
sub_list = unedgify(node[key])
if type(sub_list) == dict:
sub_list = sub_list.copy()
if type(sub_list) != list:
sub_list = [sub_list]
node_list = node_list + sub_list
field_dict.update(sub_fields)
else:
# at the stop-level, use normal field names
key = prefix
#print(f'PREFIX:{prefix} STOP_PREFIX:{stop_prefix}')
#pprint.pprint(node)
if stop_prefix in key:
key = prefix.split(seperator)[-1]
#print(f'KEY NOW: {key}')
field_dict[key] = node
if len(node_list) > 0 and \
len(field_dict) > 0:
for entry in node_list:
# cannot blindly do entry.update(field_dict), as subtrees with
# no nodes for stop_prefix will bubble up and overwrite previous
# entries...
for field_key in field_dict:
if type(entry) == dict:
if field_key not in entry:
entry[field_key] = field_dict[field_key]
field_dict = {}
return node_list, field_dict.copy()
output_list, fields_noop = _flatten_json(node, stop_prefix, seperator, prefix, depth)
return output_list
class NodeGQL(RawGQL):
"""
"""
def __init__(self, name, fields=[], names=[], top_level=False, debug=False):
super().__init__('', debug)
self.fields=fields
self.nodes=[]
self.name=name
self.names=names
self.top_level = top_level
# if the name starts with 'all' then assume its top_level...
if self.top_level is False:
if self.name[:3] == 'all':
self.top_level = True
def get_top_level(self):
return self.top_level
def set_fields(self, fields):
self.fields = fields
def clear_fields(self):
self.fields=[]
def add_node(self, node):
self.nodes.append(node)
def get_api_string(self):
api_string = self.name
if len(self.names) > 0:
name_count = 0
api_string += '(names: [ '
for name in self.names:
if name_count > 0:
api_string += ', '
api_string += '"' + name + '"'
name_count += 1
api_string += ' ])'
return api_string
    def format(self):
        query_str = self.get_api_string()
        if len(self.fields) > 0 or len(self.nodes) > 0:
            query_str += ' { edges { node {'
            for field in self.fields:
                query_str += ' ' + field
            for node in self.nodes:
                query_str += ' ' + node.format()
            query_str += ' } } }'
        #print(query_str)
        return query_str
def format_results(self, json_dict):
return json_dict['data'][self.name]['edges'][0]
```
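A short sketch of composing a query with these classes and flattening the reply; the query names, fields, and stop prefix are illustrative.

```python
# Hypothetical query against the local graphql API.
try:
    from stage_check import gql_helper
except ImportError:
    import gql_helper

token = gql_helper.RestToken()  # reads or creates ~/.graphql/token
query = gql_helper.NodeGQL("allRouters", fields=["name"], names=["my-router"])
query.add_node(gql_helper.NodeGQL("nodes", fields=["name", "state"]))

reply = {}
errors = []
server_status = query.send_query(token, reply, errors)
if server_status == 200 and not errors:
    # Flatten nested edges/node wrappers into a list of flat entries,
    # stopping at the 'router/nodes' level.
    for entry in query.flatten_json(reply, "router/nodes", seperator="/"):
        print(entry.get("name"), entry.get("state"))
```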
#### File: stage_check/stage_check/OutputAssetState.py
```python
try:
from stage_check import Output
except ImportError:
import Output
class Base(Output.Base):
def __init__(self):
super().__init__()
self.__full_name = 'OutputAssetState.Base'
"""
interim_result
"""
def proc_interim_result(self, entry, status=None):
if status is not None:
self.status = status
self.amend_interim_result(
entry,
status=status
)
return self.status
def amend_interim_result(self, entry, status=None):
"""
Override this method if necessary
"""
return True
"""
test_result
"""
def proc_test_result(self,
entry_test,
stats,
status=None):
if status is not None:
self.status = status
self.amend_test_result(
entry_test,
stats
)
return self.status
def amend_test_result(
self,
no_match,
stats) :
"""
Override this method if necessary
"""
return True
```
#### File: stage_check/stage_check/OutputDeviceNamespaceText.py
```python
import pprint
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputDeviceNamespace
except ImportError:
import OutputDeviceNamespace
def create_instance():
return OutputDeviceNamespaceText()
class OutputDeviceNamespaceText(OutputDeviceNamespace.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
def add_namespace_match(self,
status,
message,
list_message):
"""
@status
@message
@list_message
"""
self.status = status
if message is not None and message != '':
self.message = message
if list_message is not None and list_message != '':
self.message_list.append(list_message)
return self.status
def amend_namespace_match(self,
message,
list_message):
"""
@message
@list_message
"""
if message is not None and message != '':
self.message = message
if list_message is not None and list_message != '':
self.message_list.append(list_message)
return True
def amend_run_linux_error(self,
return_status,
error_string):
"""
@return_status
@error_string
"""
self.message = error_string
return True
def amend_test_result(self,
ns_line_count,
line_count,
params):
"""
@ns_line_count - Matching line count, specified namespace
@line_count - Matching line count, global namespace
@test_params - Test parameters
"""
if self.message is None:
if line_count == 0 and ns_line_count == 0:
self.message = params['error-no-data']
elif len(self.message_list) > 0:
self.message = self.message_list.pop(0)
return True
```
#### File: stage_check/stage_check/OutputDeviceStateText.py
```python
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputDeviceState
except ImportError:
import OutputDeviceState
def create_instance():
return OutputDeviceStateText()
class OutputDeviceStateText(OutputDeviceState.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
self.message = 'All required network devices In Service'
def amend_test_match(self, entry):
"""
@status
@entry
"""
return self.entry_result_to_text(entry)
def amend_test_fail_result(self, count):
"""
@count
"""
self.message = f"{count} required network devices are not LINK UP"
self.message_list.append("Use PCLI 'show device-interfaces' for more information")
return True
def amend_test_warn_result(self, count):
"""
@count
"""
self.message = f"Incorrect state for {count} device interfaces"
self.message_list.append("Use PCLI 'show device-interfaces' for more information")
return True
```
#### File: stage_check/stage_check/OutputFib.py
```python
try:
from stage_check import Output
except ImportError:
import Output
class Base(Output.Base):
def __init__(self):
super().__init__()
self.__full_name = 'OutputFib.Base'
"""
too_few_entries
"""
def proc_too_few_entries(
self,
params,
stats
):
self.status = Output.Status.FAIL
self.amend_too_few_entries(
params,
stats
)
return self.status
def amend_too_few_entries(
self,
params,
stats
):
return True
"""
too_many_entries
"""
def proc_too_many_entries(
self,
params,
stats
):
self.status = Output.Status.FAIL
self.amend_too_many_entries(
params,
stats
)
return self.status
def amend_too_many_entries(
self,
params,
stats
):
return True
"""
test_match
"""
def proc_test_match(
self,
entry
):
if "test_status" in entry and \
entry["test_status"] == Output.Status.FAIL:
self.status = entry["test_status"]
self.amend_test_match(
entry
)
def amend_test_match(
self,
entry
):
return True
"""
test_result
"""
def proc_test_result(
self,
params,
stats
):
if not "fail_count" in stats:
self.status = Output.Status.OK
self.amend_test_result(
params,
stats
)
def amend_test_result(
self,
params,
stats
):
return True
```
#### File: stage_check/stage_check/OutputFibText.py
```python
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputFib
except ImportError:
import OutputFib
def create_instance():
return OutputFibText()
class OutputFibText(OutputFib.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
    def amend_too_few_entries(
        self,
        params,
        stats
        ):
        # "minimum_entries" is an assumed parameter key; the original body
        # referenced names that are not defined in this scope.
        self.message = f"FIB size = {stats['total_count']} < {params['minimum_entries']}"
        return True
    def amend_too_many_entries(
        self,
        params,
        stats
        ):
        # "maximum_entries" is likewise an assumed parameter key.
        self.message = f"FIB size = {stats['total_count']} > {params['maximum_entries']}"
        return True
def amend_test_match(
self,
entry
):
return self.entry_result_to_text(entry)
def amend_test_result(
self,
params,
stats
):
if "fail_count" in stats:
self.message = f"{stats['fail_count']} / {stats['total_count']} FIB entries FAIL ({stats['exclude_count']} excluded)"
else:
self.message = f"{stats['total_count'] - stats['exclude_count']} FIB entries PASS ({stats['exclude_count']} excluded)"
return True
```
#### File: stage_check/stage_check/OutputGatewayPingText.py
```python
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputGatewayPing
except ImportError:
import OutputGatewayPing
def create_instance():
return OutputGatewayPingText()
class OutputGatewayPingText(OutputGatewayPing.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
self.message = "No Results to display..."
def amend_cannot_ping_no_gateway(self,
entry,
ni_key):
"""
Invoked if no gateway can be found for the network interface
@entry
@ni_key
"""
self.message_list.append(f'Cannot ping via NI {entry[ni_key]}, No Gateway!')
return True
def amend_cannot_ping_dev_status(self,
entry,
ni_key,
status):
"""
Invoked when ping cannot be completed due to the device interface
having status != OPER_UP
@entry
@ni_key
@status
"""
self.message_list.append(f'Cannot ping via NI {entry[ni_key]}, device status: {status}')
return True
def amend_no_data_in_reply(self,
entry,
ni_key,
gateway):
"""
Invoked when a ping has been issued and here is missing address
data in the ping reply.
@entry
@ni_key - Key used to get interface name from entry
@gateway - Pinging to/via this Gateway
"""
error_msg = f"{entry[ni_key]} -> {gateway}: No address info in reply"
if not error_msg in self.message_list:
self.message_list.append(f"{entry[ni_key]} -> {gateway}: No address info in reply")
return True
def amend_no_address_in_reply(self,
entry,
ni_key):
"""
Invoked when no address or gateway data is available for
a network interface, and thus ping cannot be issued
@entry - Interface entry being processed
@ni_key - Key used to get interface name from entry
"""
self.message_list.append(f"Skipping NI {entry[ni_key]}: No address info in reply")
return True
def amend_ping_result_pass(self,
entry,
ni_key,
ping_count,
ping_success_count,
target,
average_reply_time):
"""
Invoked when all ping_count pings for a network interface receive responses
@entry,
@ni_key,
@ping_count,
@ping_success_count,
@target,
@average_reply_time
"""
self.message_list.append(f"NI {entry[ni_key]}: {ping_success_count}/{ping_count} replies from {target}; " +
f"average latency {average_reply_time:.2}ms")
return True
def amend_ping_result_fail(self,
entry,
ni_key,
ping_count,
ping_fail_count,
target,
average_reply_time):
"""
Invoked when some of the pings for a network interface receive no response
@entry,
@ni_key,
@ping_count,
        @ping_fail_count,
@target,
@average_reply_time
"""
self.message_list.append(f"NI {entry[ni_key]}: {ping_fail_count}/{ping_count} fails to {target}; " +
f"average latency {average_reply_time:.2}ms")
return True
def amend_test_result(self,
gateway_count,
gateway_success_count,
gateway_fail_count,
params):
"""
@gateway_count
@gateway_success_count
@gateway_fail_count
@params
"""
iterations = params['iterations']
if gateway_count == 0:
self.message = "No matching Network Interfaces!"
if len(self.message_list) > 1:
if self.status == Output.Status.OK:
self.message = f"All {gateway_success_count} matching NIs received" \
f" {iterations}/{iterations} replies"
else:
self.message = f"Some matching NIs received < {iterations} replies"
elif len(self.message_list) == 1:
self.message = self.message_list.pop()
return True
```
#### File: stage_check/stage_check/OutputLogsText.py
```python
import pprint
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputLogs
except ImportError:
import OutputLogs
def create_instance():
return OutputLogsText()
class OutputLogsText(OutputLogs.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
def amend_pattern_matched(
self,
params,
pattern
):
format_str = "Missing format string..."
try:
index = int(pattern["pindex"])
except (KeyError, ValueError) as e:
self.message_list.append(f"No pattern index available!")
return True
try:
patterns = params["patterns"]
except KeyError:
self.message_list.append(f"Pattern List missing from config.json")
return True
try:
config = patterns[index]
except IndexError:
self.message_list.append(f"No config for pattern index={index}!")
return True
try:
format_str = config["format"]
except KeyError:
pass
output_str = Output.populate_format(pattern, format_str);
self.message_list.append(output_str)
return True
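    # Illustrative (assumed) shape of the data consumed above:
    #   pattern            = { "pindex": "0", ...fields from the matched line... }
    #   params["patterns"] = [ { "format": "matched: {pindex}" }, ... ]
    # Output.populate_format() substitutes fields of `pattern` into the
    # selected "format" string.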
def amend_test_result(
self,
params,
stats
):
"""
"""
format_str="Missing format..."
if self.status == Output.Status.FAIL:
try:
format_str = params["result"]["FAIL"]
except KeyError:
pass
else:
try:
format_str = params["result"]["PASS"]
except KeyError:
pass
stats["past_hours"] = params["past_hours"]
self.message = Output.populate_format(stats, format_str)
return self.status
```
#### File: stage_check/stage_check/OutputRecentCores.py
```python
try:
from stage_check import Output
except ImportError:
import Output
class Base(Output.Base):
def __init__(self):
super().__init__()
self.__full_name = 'OutputRecentCores.Base'
"""
test_result
"""
def proc_uptime_match(
self,
cores
):
self.amend_uptime_match(
cores
)
def amend_uptime_match(
self,
cores
):
return True
"""
service_match
"""
def proc_service_match(
self,
service,
cores
):
self.amend_service_match(
service,
cores
)
def amend_service_match(
self,
service,
cores
):
return True
"""
test_result
"""
def proc_test_result(
self,
message,
status = None
):
if status is not None:
self.status = status
self.amend_test_result(
message
)
def amend_test_result(
self,
message
):
return True
```
#### File: stage_check/stage_check/OutputRecentCoresText.py
```python
import pprint
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import OutputRecentCores
except ImportError:
import OutputRecentCores
def create_instance():
return OutputRecentCoresText()
class OutputRecentCoresText(OutputRecentCores.Base, Output.Text):
"""
"""
def __init__(self):
super().__init__()
def amend_uptime_match(
self,
cores
):
exec_counts = {}
for core in cores:
exec_name = core["EXE"]
exec_name = exec_name.split('/')[-1]
try:
exec_counts[exec_name] += 1
except KeyError:
exec_counts[exec_name] = 1
for exec_name in exec_counts:
self.message_list.append(f"{exec_counts[exec_name]} {exec_name} crashes since OS boot")
return True
def amend_service_match(
self,
service,
cores
):
exec_counts = {}
for core in cores:
exec_name = core["EXE"]
exec_name = exec_name.split('/')[-1]
try:
exec_counts[exec_name] += 1
except KeyError:
exec_counts[exec_name] = 1
for exec_name in exec_counts:
self.message_list.append(f"{exec_counts[exec_name]} {exec_name} crashes since {service} start")
return True
def amend_test_result(
self,
message
):
        self.message = message
        return True
```
#### File: stage_check/stage_check/OutputRedundancyDatabaseConn.py
```python
try:
from stage_check import Output
except ImportError:
import Output
class Base(Output.Base):
"""
"""
def __init__(self):
super().__init__()
self.__full_name = "OutputRedundancyDatabaseConn.Base"
self.status = Output.Status.OK
"""
no_node_data
"""
def proc_no_node_data(self, local_info):
"""
"""
self.status = Output.Status.WARN
self.amend_no_node_data(local_info)
return self.status
def amend_no_node_data(self, local_info):
"""
"""
return True
"""
metric
"""
def proc_metric(self, entry, entry_value):
"""
"""
self.amend_metric(entry, entry_value)
return self.status
def amend_metric(self, entry, entry_value):
"""
"""
return True
"""
missing_data
"""
def proc_missing_data(self):
"""
"""
self.status = Output.Status.FAIL
self.amend_missing_data()
return self.status
def amend_missing_data(self):
"""
"""
return True
"""
"""
def proc_test_result(self,
status,
entry_count,
test_value,
fail_count,
expected_entries):
"""
@entry_count
@test_value
@fail_count
@expected_entries
"""
if status is not None:
self.status = status
self.amend_test_result(
entry_count,
test_value,
fail_count,
expected_entries
)
return self.status
def amend_test_result(self,
entry_count,
test_value,
fail_count,
expected_entries):
"""
@entry_count
@test_value
@fail_count
@expected_entries
"""
return True
"""
"""
def proc_test_result_bad_values(self,
entry_count,
test_value,
fail_count,
expected_entries):
"""
@entry_count
@test_value
@fail_count
@expected_entries
"""
self.amend_test_result_bad_values(
entry_count,
test_value,
fail_count,
expected_entries
)
return self.status
    def amend_test_result_bad_values(self,
entry_count,
test_value,
fail_count,
expected_entries):
"""
@entry_count
@test_value
@fail_count
@expected_entries
"""
return True
```
#### File: stage_check/stage_check/TestEthtool.py
```python
import pprint
import re
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import EntryTest
except ImportError:
import EntryTest
try:
from stage_check import Linux
except ImportError:
import Linux
try:
from stage_check import AbstractTest
except ImportError:
import AbstractTest
def create_instance(test_id, config, args):
"""
Invoked by TestExecutor class to create a test instance
@test_id - test index number
@config - test parameters from configuration
@args - command line args
"""
return TestT1Detail(test_id, config, args)
class TestT1Detail(AbstractTest.Linux, EntryTest.MatchFunction):
"""
Tests to see if the requested device pattern matches in the
global namespace and/or the specified namespaces(s)
"""
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
self.results = []
def get_params(self):
# apply defaults
default_params = {
"node_type" : "secondary",
"linux_device" : "",
"exclude_tests" : [],
"entry_tests" : {},
}
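        # Illustrative (assumed) configuration overrides, e.g.:
        #   { "linux_device": "eth0",
        #     "entry_tests": { "tests": [ { "test": "...", "status": "FAIL" } ] } }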
params = self.apply_default_params(default_params)
return params
def run(self, local_info, router_context, gql_token, fp):
"""
"""
# Ugly!
test_info = self.test_info(local_info, router_context)
self.output.test_start(test_info, status=Output.Status.OK)
params = self.get_params()
if self.check_user("root") != Output.Status.OK:
return self.output.test_end(fp)
self.output.progress_start(fp)
json_data = {}
error_lines = []
ethtool = Linux.Ethtool(
debug=self.debug,
progobj=self
)
# Ugly....
self.fp = fp
shell_status = ethtool.run_linux_args(
local_info,
router_context,
params['node_type'],
params['linux_device'],
error_lines,
json_data
)
# Ugly...
self.fp = None
if self.debug:
print('........ flattened list ..........')
pprint.pprint(json_data)
engine = EntryTest.Parser(debug=self.debug)
engine.eval_entry_by_tests(
json_data,
params["entry_tests"]
)
self.output.proc_test_result(json_data)
return self.output.test_end(fp)
```
#### File: stage_check/stage_check/TestInterfaceLearnedIP.py
```python
import pprint
import ipaddress
try:
from stage_check import gql_helper
except ImportError:
import gql_helper
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import AbstractTest
except ImportError:
import AbstractTest
def create_instance(test_id, config, args):
"""
Invoked by TestExecutor class to create a test instance
@test_id - test index number
    @config - test parameters from config
@args - command line args
"""
return TestInterfaceLearnedIP(test_id, config, args)
class TestInterfaceLearnedIP(AbstractTest.GraphQL):
"""
"""
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
def requires_grapqhl(self):
"""
Override
"""
return True
def get_params(self):
"""
"""
default_params = {
"network-interfaces" : [],
"exclude_tests" : [],
"skip_no_address" : True
}
params = self.apply_default_params(default_params)
return params
def run(self, local_info, router_context, gql_token, fp):
"""
        This test uses the gql engine to get device state.
Sample data returned by query:
[{'name': 'ha-fabric', 'state': None},
{'name': 'DIA', 'state': {'addresses': [{'ipAddress': '172.16.4.103'}]}},
{'name': 'mpls-t1', 'state': {'addresses': [{'ipAddress': '<empty>'}]}}]
"""
test_info = self.test_info(local_info, router_context)
self.output.test_start(test_info)
params = self.get_params()
include_list = params["network-interfaces"]
exclude_list = params["exclude_tests"]
        # Kind of a hack, as we suggest that it's OK for an address to be
        # missing. Theoretically this is an error condition, but currently it
        # is normal for GraphQL to somewhat arbitrarily pick a node's L2 HA
        # interfaces to place state information in.
skip_if_address_missing = params["skip_no_address"]
"""
API = allRouters
Fields = name
"""
qr = gql_helper.NodeGQL("allRouters", ['name'], [ router_context.get_router() ], debug=self.debug)
qn = gql_helper.NodeGQL("nodes", ['name'])
qd = gql_helper.NodeGQL("deviceInterfaces", [ 'name', 'sharedPhysAddress', 'state { operationalStatus }' ])
qi = gql_helper.NodeGQL("networkInterfaces", ['name', 'state { addresses { ipAddress } }'], include_list)
qr.add_node(qn)
qn.add_node(qd)
qd.add_node(qi)
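        # Illustrative sketch of the query the nested NodeGQL objects build
        # (argument names are gql_helper's concern and are assumed here):
        #   allRouters { name nodes { name deviceInterfaces { name
        #     sharedPhysAddress state { operationalStatus }
        #     networkInterfaces { name state { addresses { ipAddress } } } } } }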
json_reply={}
if not self.send_query(qr, gql_token, json_reply):
return self.output.test_end(fp)
flatter_json = qr.flatten_json(json_reply, 'router/nodes/deviceInterfaces/networkInterfaces', '/')
router_context.set_allRouters_node_type(flatter_json)
        # do not work around the graphql api for now...
# self._workaround_graphql_api(flatter_json)
if self.debug:
print('........ flattened list ..........')
pprint.pprint(flatter_json)
address_list=[]
not_excluded_count = 0
if len(flatter_json) > 0:
for entry in flatter_json:
                if_name = None
                try:
                    if_name = entry['name']
                except KeyError:
                    pass
                if len(include_list) > 0 and \
                   if_name not in include_list:
                    continue
                if len(exclude_list) > 0 and \
                   if_name in exclude_list:
                    continue
                address = None
if if_name is not None:
if self.exclude_flat_entry(entry, exclude_list):
continue
#if entry['router/nodes/deviceInterfaces/state/operationalStatus'] != "OPER_UP"
try:
address = entry['state']['addresses'][0]['ipAddress']
except (KeyError, IndexError, TypeError) as e:
# TODO: Report Exception {e.__class__.__name__} {e}
if not skip_if_address_missing:
self.output.proc_address_missing(None, entry)
continue
if address is not None:
# address='1.1.1.1'
if address == '<empty>':
self.output.proc_empty_address(entry)
else:
ip_address = ipaddress.ip_address(address)
status = self.output.status
if ip_address.is_private:
status = Output.Status.FAIL
iptype = 'Private'
else:
iptype = 'Public'
address_list.append(address)
self.output.proc_address_type(status, entry, address, iptype)
else:
if skip_if_address_missing:
continue
self.output.proc_address_missing(None, entry)
not_excluded_count += 1
else:
self.output.proc_no_interfaces_found(include_list)
status = self.output.status
if status == Output.Status.OK and \
not_excluded_count == 0:
status = Output.Status.FAIL
self.output.proc_test_result(status, address_list, not_excluded_count)
return self.output.test_end(fp)
```
#### File: stage_check/stage_check/TestPeerReachability.py
```python
import pprint
try:
from stage_check import gql_helper
except ImportError:
import gql_helper
try:
from stage_check import AbstractTest
except ImportError:
import AbstractTest
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import EntryTest
except ImportError:
import EntryTest
def create_instance(test_id, config, args):
"""
Invoked by TestExecutor class to create a test instance
@test_id - test index number
    @config - test parameters from config
@args - command line args
"""
return TestPeerReachability(test_id, config, args)
class TestPeerReachability(AbstractTest.GraphQL):
"""
"""
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
def requires_grapqhl(self):
"""
Override
"""
return True
def get_params(self):
"""
        expected_entries should fail schema validation if
        missing from the parameters
"""
default_params = {
"test_value" : "UP",
"test_equality" : True,
"exclude_tests" : [],
"include_list" : []
}
params = self.apply_default_params(default_params)
return params
def run(self, local_info, router_context, gql_token, fp):
"""
This test uses the gql engine to get peer reachability status
"""
test_info = self.test_info(local_info, router_context)
self.output.test_start(test_info)
params = self.get_params()
# TODO figure out what the include_list is, a list of peers?
include_list = params["include_list"]
exclusions = params["exclude_tests"]
entry_tests = params["entry_tests"]
"""
        API = allRouters
Fields = name
"""
qr = gql_helper.NodeGQL("allRouters", ['name'], [ router_context.get_router() ], debug=self.debug)
qp = gql_helper.NodeGQL("peers", [ 'name', 'paths { node adjacentNode deviceInterface networkInterface adjacentAddress status }' ],
include_list)
qr.add_node(qp)
json_reply={}
if not self.send_query(qr, gql_token, json_reply):
return self.output.test_end(fp)
# this query is not working correctly unfortunately... even UP is returned
flatter_json = qr.flatten_json(json_reply, 'router/peers/paths')
router_context.set_allRouters_node_type(flatter_json, 'node')
if self.debug:
print('........ flattened list ..........')
pprint.pprint(flatter_json)
paths_per_peer = {}
failed_peer_paths = {}
stats = {}
Output.init_result_stats(stats)
stats["total_count"] = len(flatter_json)
stats["failed_peer_count"] = 0
stats["tested_peer_count"] = 0
engine = EntryTest.Parser(self.debug)
for path in flatter_json:
try:
if engine.exclude_entry(path, exclusions):
stats["exclude_count"] += 1
continue
peer_name = path['router/peers/name']
test_result = engine.eval_entry_by_tests(
path,
entry_tests
)
Output.update_stats(stats, test_result)
if peer_name in paths_per_peer:
paths_per_peer[peer_name] += 1
else:
paths_per_peer[peer_name] = 1
stats["tested_peer_count"] += 1
if test_result == Output.Status.FAIL:
if peer_name in failed_peer_paths:
failed_peer_paths[peer_name] += 1
else:
failed_peer_paths[peer_name] = 1
stats["failed_peer_count"] += 1
self.output.proc_failed_peer(path, peer_name)
self.output.proc_failed_path(path)
except KeyError:
pass
status = Output.Status.OK
if stats["FAIL"] > 0:
status = Output.Status.FAIL
self.output.proc_test_result(entry_tests, stats, status=status)
return self.output.test_end(fp)
```
#### File: stage_check/stage_check/TestSessions.py
```python
import pprint
try:
from stage_check import gql_helper
except ImportError:
import gql_helper
try:
from stage_check import Output
except ImportError:
import Output
try:
from stage_check import AbstractTest
except ImportError:
import AbstractTest
try:
from stage_check import EntryTest
except ImportError:
import EntryTest
def create_instance(test_id, config, args):
"""
Invoked by TestExecutor class to create a test instance
@test_id - test index number
    @config - test parameters from config
@args - command line args
"""
return TestSessions(test_id, config, args)
class TestSessions(AbstractTest.GraphQL):
"""
Filtering of the session table is performed in highwayManager by
taking all fields, concatenating into a string and matching the
filter against this string to match the flow.
"""
def __init__(self, test_id, config, args):
super().__init__(test_id, config, args)
def requires_grapqhl(self):
"""
Override
"""
return True
def get_params(self):
"""
Apply defaults (some of these are non-sensical
and absense should be caught during schema
validation
"""
default_params = {
"idle_theshold_seconds" : 0,
"idle_maximum_seconds" : 0,
"max_sessions_to_query" : 0,
"filter_string" : "",
"match_port" : 0,
"exclude_tests" : []
}
params = self.apply_default_params(default_params)
return params
def run(self, local_info, router_context, gql_token, fp):
        flowEntryFields = [
'sourceIp',
'destIp',
'sourcePort',
'destPort',
'vlan',
'devicePort',
'protocol',
'sessionUuid',
'natIp',
'natPort',
'serviceName',
'tenant',
'encrypted',
'inactivityTimeout',
'deviceInterfaceName',
'networkInterfaceName',
'startTime',
'forward'
]
test_info = self.test_info(local_info, router_context)
self.output.test_start(test_info)
params = self.get_params()
try:
idle_threshold_seconds = params["idle_threshold_seconds"]
idle_maximum_seconds = params["idle_maximum_seconds"]
max_sessions = params["max_sessions_to_query"]
filter_string = params["filter_string"]
match_port = params["match_port"]
except Exception as e:
# TODO: Improve error handling
print("CONFIG ERROR\n")
return Output.Status.FAIL
exclude_tests = []
if "exclude_tests" in params:
exclude_tests = params["exclude_tests"]
flow_entry_suffix=f'(first: {max_sessions}, filter: "\\"\\"~\\"{filter_string}\\"")'
if local_info.get_router_name() == router_context.get_router() and \
local_info.get_node_type() == 'conductor':
# Check Error output
self.output.unsupported_node_type(local_info)
return Output.Status.WARN
qr = gql_helper.NodeGQL("allRouters", ['name'], [ router_context.get_router() ], debug=self.debug)
qn = gql_helper.NodeGQL("nodes", ['name'])
qf = gql_helper.NodeGQL(f"flowEntries{flow_entry_suffix}", flowEntryFields)
qr.add_node(qn)
qn.add_node(qf)
json_reply={}
if not self.send_query(qr, gql_token, json_reply):
return self.output.test_end(fp)
# Unfortunately jmespath is buggy and does not work well for integers :-(
        # This is unfortunate as the hope was to use a jmespath expression
# to eliminate all valid sessions (however that might be defined)
flatter_json = qr.flatten_json(json_reply, 'router/nodes/flowEntries', '/')
if self.debug:
print('........ flattened list ..........')
pprint.pprint(flatter_json)
matching_flows = {}
session_flow_counts = {}
stats = {}
        Output.init_result_stats(stats)
stats["total_count"] = len(flatter_json)
stats["session_flow_count"] = 0
engine = EntryTest.Parser(debug=self.debug)
for flow in flatter_json:
try:
uuid = flow['sessionUuid']
if engine.exclude_entry(flow, exclude_tests):
stats["exclude_count"] += 1
continue
                if uuid not in session_flow_counts:
                    session_flow_counts[uuid] = 1
                else:
                    session_flow_counts[uuid] += 1
test_status = engine.eval_entry_by_tests(flow, params["entry_tests"])
Output.update_stats(stats, test_status)
if test_status == Output.Status.FAIL:
# Note that this must be configured in the parameters just so the value can
# be used in this calculation
delta = idle_maximum_seconds - flow['inactivityTimeout']
flow["test_idle_duration"] = delta
                    if uuid not in matching_flows or \
                       matching_flows[uuid]["test_idle_duration"] < delta:
matching_flows[uuid] = flow
except (KeyError, TypeError) as e:
flow["test_exception"] = f"Flow Exception: {e}"
continue
stats["session_flow_count"] = len(session_flow_counts)
status = Output.Status.FAIL
if len(matching_flows) == 0:
status = Output.Status.OK
self.output.proc_test_result(status, matching_flows, stats, params)
return self.output.test_end(fp)
```
#### File: stage_check/tests/test_entry.py
```python
import re
import json
import time
import datetime
import pprint
import sly
import pytest
try:
from stage_check import EntryTest
except ImportError:
import EntryTest
try:
from stage_check import Output
except ImportError:
import Output
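# Expression forms exercised below, as inferred from these cases (see
# EntryTest.Parser for the authoritative grammar):
#   @field == 'Undefined' / 'None'  -- probe a missing field / a None value
#   ?field == False                 -- probe the absence of a key
#   a == x && b < y                 -- compound boolean expressions
# "exclude_tests" run first; a match skips the entry entirely ("SKIP" below).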
test_data = [
{
"exclude_tests" : [
"route/tenant == 'null-tenant'"
],
"entry_tests" : {
"no_match" : {
"status" : "PASS",
"text" : "blah blah blah"
},
"defaults" : {
"status" : "FAIL"
},
"tests" : [
{
"test" : "@Loss_Of_Signal == 'Undefined'",
"status" : "FAIL",
"format" : "Test Undefined Variable",
},
{
"test" : "@Loss_Of_Signal == 'None'",
"status" : "FAIL",
"format" : "Test None Variable",
},
{
"test" : "@Loss_Of_Signal != 'Undefined' && Loss_Of_Signal > 1000",
"status" : "FAIL",
"format" : "Test Loss_Of_Signal > 1000",
},
{
"test" : "?gateway == False",
"status" : "WARN",
"format" : "Test Gateway Undefined",
},
{
"test" : "route/ipPrefix == '0.0.0.0/0' && gateway == '172.23.5.1' && route/l4Port < 1000",
"status" : "FAIL",
"format" : "Test Compound expression for {gateway}",
},
{
"test" : "route/ipPrefix == '0.0.0.0/8'",
"status" : "PASS",
"format" : "Test PASS for route/ipPrefix"
},
{
"test" : "route/ipPrefix == '0.0.0.0/8'",
"status" : "FAIL",
"format" : "Test FAIL for route/ipPrefix"
}
]
}
}
]
@pytest.mark.parametrize('entry,expected',
[
(
{
"Loss_Of_Signal" : "50000",
"serviceName": "internet_service",
"route/ipPrefix": "0.0.0.0/0",
"route/l4Port": 0,
"route/l4PortUpper": 0,
"route/protocol": 'null',
"route/tenant": "null-tenant",
"devicePort": 110,
"gateway": "172.23.5.1",
"nodeId": 2,
"vlan": 1011,
"deviceInterface": "PUBLIC_S",
"networkInterface": "public_1011",
},
{
"status" : "SKIP"
}
),
(
{
"Loss_Of_Signal" : "50000",
"serviceName": "internet_service",
"route/ipPrefix": "0.0.0.0/0",
"route/l4Port": 0,
"route/l4PortUpper": 0,
"route/protocol": 'null',
"route/tenant": "unconstrained.wireless",
"devicePort": 110,
"gateway": "172.23.5.1",
"nodeId": 2,
"vlan": 1011,
"deviceInterface": "PUBLIC_S",
"networkInterface": "public_1011"
},
{
"status" : "FAIL"
}
),
(
{
"Loss_Of_Signal" : "100",
"serviceName": "<ControlMessageService>",
"route/ipPrefix": "0.0.0.0/8",
"route/l4Port": 0,
"route/l4PortUpper": 0,
"route/protocol": 'null',
"route/tenant": "<global>"
},
{
"status" : "WARN"
}
),
(
{
"Loss_Of_Signal" : "100",
"serviceName": "<ControlMessageService>",
"route/ipPrefix": "0.0.0.0/8",
"route/l4Port": 0,
"route/l4PortUpper": 0,
"route/protocol": 'null',
"route/tenant": "<global>",
"gateway": "172.23.5.1"
},
{
"status" : "PASS"
}
),
(
{
},
{
"status" : "FAIL"
}
),
(
{
"Loss_Of_Signal" : None
},
{
"status" : "FAIL"
}
)
]
)
def test_entry_matching(entry, expected):
global test_data
print("#######################################")
pprint.pprint(entry)
print("#######################################")
parser = EntryTest.Parser(debug=True)
matched = parser.exclude_entry(entry, test_data[0]["exclude_tests"])
pprint.pprint(matched)
if matched:
# ensure we really meant this entry to be skipped
assert expected["status"] == "SKIP"
#print(f"Excluded entry:")
#print(f"==================")
#pprint.pprint(entry)
else:
test_status = parser.eval_entry_by_tests(entry, test_data[0]["entry_tests"])
print(f"Matched Entry Status: {Output.status_to_text(test_status)}({test_status})")
#print("#######################################")
#pprint.pprint(entry)
#print("#######################################")
assert Output.status_to_text(test_status) == expected["status"]
``` |
{
"source": "1291945816/ML_Assessment",
"score": 3
} |
#### File: 1291945816/ML_Assessment/HandleImage.py
```python
import numpy as np
import cv2
def image_to_bin(img):
    '''
    :param img: a preprocessed black-and-white image
    :return: a flat list of 0/1 values (1 for black pixels)
    '''
    tempimage = []
    h, w, _ = img.shape
    for x in range(h):
        for y in range(w):
            # a pixel counts as black when all three channels are zero
            if int(img[x][y][0]) + int(img[x][y][1]) + int(img[x][y][2]) == 0:
                tempimage.append(1)
            else:
                tempimage.append(0)
    return tempimage
def image_handle(filename):
    img = cv2.imread(filename)
    img = cv2.resize(img, (128, 64), interpolation=cv2.INTER_CUBIC)
    # binarize every pixel so the image holds only uniform black/white values
    for index_x, i in enumerate(img):
        for index_y, j in enumerate(i):
            if (j > 200).all():
                img[index_x][index_y] = 240  # white
            else:
                img[index_x][index_y] = 0    # black
    # segment digits using the white-pixel count of each column
    (h, w, _) = img.shape
    w -= 1
    col_nz = [0] * w
    for x in range(w):
        for y in range(h):
            if img[y, x][0] == 240:
                col_nz[x] += 1
    # find the column range occupied by each digit
    count_scope = []
    temp = 0
    tempValue = -1
    for i in range(len(col_nz)):
        if col_nz[i] <= 60 and temp == 0:
            tempValue = i
            temp = 1
        elif temp == 1 and col_nz[i] > 60:
            count_scope.append((tempValue, i))
            temp = 0
image_list = []
    if len(count_scope) >= 1:
for x in count_scope:
tempImage = img[:,x[0]:x[1]]
tempImage = cv2.resize(tempImage, (32, 32), interpolation=cv2.INTER_CUBIC)
tempImage = image_to_bin(tempImage)
image_list.append(tempImage)
return image_list
else:
return None
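if __name__ == "__main__":
    # Minimal usage sketch; "digits.png" is a placeholder path, not a file
    # shipped with this module.
    digits = image_handle("digits.png")
    if digits is None:
        print("no digit regions found")
    else:
        # each entry is a flattened 32x32 binary image (1024 values)
        print(f"segmented {len(digits)} digit(s), {len(digits[0])} values each")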
``` |
{
"source": "12944qwerty/Ratchet",
"score": 3
} |
#### File: Ratchet/Ratchet/Bot.py
```python
import discord as d
import sys, traceback
from discord.ext import commands as c
import os
from random import randint
import datetime
class Bot(c.Cog):
def __init__(self,client):
self.bot = client
@c.group(invoke_without_command=True,name='info',aliases=['desc','botinfo'])
async def info(self,ctx):
"""Sends desc of bot"""
try:
await ctx.send('I am Ratchet. A fun bot. To see a list of commands, type in \'\\helps\'.\n To report a bug, DM the owner. `<PASSWORD>`')
await ctx.send('Subcommands:\n - `\\info guilds`\n - `\\info status`\n - `\\info invite`\n - `\\info ping`\n - `\\info emojis`')
except Exception as e:
print(e)
@info.command(name='guilds')
async def guilds(self,ctx):
for guild in self.bot.guilds:
await ctx.send(f'{guild.name} - {guild.id}')
@info.command(name='emojis')
async def emojis(self,ctx):
"""spits out the emojis that this bot is connected to"""
emojis = ''
for emoji in self.bot.emojis:
emojis += '{}'.format(str(emoji))
await ctx.send(emojis)
@info.command(name='status', aliases=['use','usage'])
async def status(self,ctx):
await ctx.send('__**_Status_**__ <:online:512174327899226123>')
await ctx.send('Server Count: {}'.format(len(self.bot.guilds)))
user = 0
for guild in self.bot.guilds:
for member in guild.members:
user += 1
await ctx.send('Serving {} users'.format(user))
"""@info.command(name='inv', aliases=['invite_link','invite'])
async def inv(self,ctx):
Want this bot on your server?
await ctx.send('Hi! If you would like me to be on your server, please use this link:\n <https://discordapp.com/api/oauth2/authorize?client_id=549642567718010880&permissions=2146958839&scope=bot>')"""
@c.guild_only()
@c.command(name='leave')
async def leave(self,ctx):
"""LEaves the Guild. ADMIN ONLY"""
if ctx.author.id == <PASSWORD>:
await ctx.send('Awww, why don\'t you want me???')
await ctx.guild.leave()
@c.command(name='report',aliases=['bug'])
async def report(self,ctx):
await ctx.send('Please report any bugs or glitches with this bot to 12944qwerty#9317. :D\nIf you have any suggestions, feel free to talk there too.')
@info.command(name='ping')
async def ping(self,ctx):
"""Tests for reply speed"""
milis = datetime.datetime.now().timestamp()
try:
em = d.Embed(title='Pong!:ping_pong:')
em.add_field(name='Latency',value=f'{round(self.bot.latency * 1000,1)}ms')
milis1 = datetime.datetime.now().timestamp()
            em.add_field(name='Heartbeat:heartbeat:',value=f'{round((milis1-milis) * 1000,2)}ms')
em.set_footer(text=self.bot.user.display_name, icon_url=self.bot.user.avatar_url)
msg = await ctx.send(embed=em)
milis2 = datetime.datetime.now().timestamp()
em.add_field(name='Edit',value=f'{round((milis2-milis)*1000,1)}ms')
await msg.edit(embed=em)
except Exception as e:
print(e)
def setup(bot):
bot.add_cog(Bot(bot))
``` |
{
"source": "1297rohit/FaceReco",
"score": 2
} |
#### File: FaceReco/FaceReco/supportFiles.py
```python
from pkg_resources import resource_filename
def pose_predictor_model_location():
return resource_filename(__name__, "./shape_predictor_68_face_landmarks.dat")
def modelFile_location():
return resource_filename(__name__, "./opencv_face_detector_uint8.pb")
def face_recognition_model_location():
return resource_filename(__name__, "./dlib_face_recognition_resnet_model_v1.dat")
def configFile_location():
return resource_filename(__name__, "./opencv_face_detector.pbtxt")
``` |
{
"source": "1299172402/BBDown_GUI",
"score": 2
} |
#### File: BBDown_GUI/UI/main.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
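# UI definition in the style of pyuic5-generated code: widgets are built in
# setupUi, display strings are set in retranslateUi.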
class Ui_Form_main(object):
def setupUi(self, Form_main):
Form_main.setObjectName("Form_main")
Form_main.resize(1560, 500)
Form_main.setMinimumSize(QtCore.QSize(620, 400))
Form_main.setMaximumSize(QtCore.QSize(1560, 500))
self.horizontalLayoutWidget = QtWidgets.QWidget(Form_main)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(30, 180, 571, 31))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.checkBox_ffmpeg = QtWidgets.QCheckBox(self.horizontalLayoutWidget)
self.checkBox_ffmpeg.setWhatsThis("")
self.checkBox_ffmpeg.setChecked(True)
self.checkBox_ffmpeg.setObjectName("checkBox_ffmpeg")
self.horizontalLayout.addWidget(self.checkBox_ffmpeg)
self.lineEdit_ffmpeg = QtWidgets.QLineEdit(self.horizontalLayoutWidget)
self.lineEdit_ffmpeg.setText("")
self.lineEdit_ffmpeg.setReadOnly(False)
self.lineEdit_ffmpeg.setObjectName("lineEdit_ffmpeg")
self.horizontalLayout.addWidget(self.lineEdit_ffmpeg)
self.pushButton_ffmpeg = QtWidgets.QPushButton(self.horizontalLayoutWidget)
self.pushButton_ffmpeg.setEnabled(True)
self.pushButton_ffmpeg.setObjectName("pushButton_ffmpeg")
self.horizontalLayout.addWidget(self.pushButton_ffmpeg)
self.horizontalLayoutWidget_3 = QtWidgets.QWidget(Form_main)
self.horizontalLayoutWidget_3.setGeometry(QtCore.QRect(30, 220, 571, 31))
self.horizontalLayoutWidget_3.setObjectName("horizontalLayoutWidget_3")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_3)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label_dir = QtWidgets.QLabel(self.horizontalLayoutWidget_3)
self.label_dir.setObjectName("label_dir")
self.horizontalLayout_3.addWidget(self.label_dir)
self.lineEdit_dir = QtWidgets.QLineEdit(self.horizontalLayoutWidget_3)
self.lineEdit_dir.setText("")
self.lineEdit_dir.setObjectName("lineEdit_dir")
self.horizontalLayout_3.addWidget(self.lineEdit_dir)
self.pushButton_dir = QtWidgets.QPushButton(self.horizontalLayoutWidget_3)
self.pushButton_dir.setEnabled(True)
self.pushButton_dir.setObjectName("pushButton_dir")
self.horizontalLayout_3.addWidget(self.pushButton_dir)
self.groupBox = QtWidgets.QGroupBox(Form_main)
self.groupBox.setGeometry(QtCore.QRect(440, 20, 161, 71))
self.groupBox.setObjectName("groupBox")
self.comboBox_source = QtWidgets.QComboBox(self.groupBox)
self.comboBox_source.setGeometry(QtCore.QRect(10, 30, 141, 22))
self.comboBox_source.setObjectName("comboBox_source")
self.comboBox_source.addItem("")
self.comboBox_source.addItem("")
self.comboBox_source.addItem("")
self.comboBox_source.addItem("")
self.groupBox_2 = QtWidgets.QGroupBox(Form_main)
self.groupBox_2.setGeometry(QtCore.QRect(440, 110, 161, 51))
self.groupBox_2.setObjectName("groupBox_2")
self.comboBox_encoding = QtWidgets.QComboBox(self.groupBox_2)
self.comboBox_encoding.setGeometry(QtCore.QRect(10, 20, 141, 22))
self.comboBox_encoding.setObjectName("comboBox_encoding")
self.comboBox_encoding.addItem("")
self.comboBox_encoding.addItem("")
self.comboBox_encoding.addItem("")
self.comboBox_encoding.addItem("")
self.groupBox_3 = QtWidgets.QGroupBox(Form_main)
self.groupBox_3.setGeometry(QtCore.QRect(20, 20, 131, 141))
self.groupBox_3.setObjectName("groupBox_3")
self.verticalLayoutWidget_3 = QtWidgets.QWidget(self.groupBox_3)
self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(10, 20, 111, 111))
self.verticalLayoutWidget_3.setObjectName("verticalLayoutWidget_3")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.pushButton_login = QtWidgets.QPushButton(self.verticalLayoutWidget_3)
self.pushButton_login.setObjectName("pushButton_login")
self.verticalLayout_4.addWidget(self.pushButton_login)
self.pushButton_logintv = QtWidgets.QPushButton(self.verticalLayoutWidget_3)
self.pushButton_logintv.setObjectName("pushButton_logintv")
self.verticalLayout_4.addWidget(self.pushButton_logintv)
self.horizontalLayoutWidget_4 = QtWidgets.QWidget(Form_main)
self.horizontalLayoutWidget_4.setGeometry(QtCore.QRect(30, 280, 571, 41))
self.horizontalLayoutWidget_4.setObjectName("horizontalLayoutWidget_4")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_4)
self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.label_url = QtWidgets.QLabel(self.horizontalLayoutWidget_4)
self.label_url.setObjectName("label_url")
self.horizontalLayout_4.addWidget(self.label_url)
self.lineEdit_url = QtWidgets.QLineEdit(self.horizontalLayoutWidget_4)
self.lineEdit_url.setMaximumSize(QtCore.QSize(16777215, 24))
self.lineEdit_url.setObjectName("lineEdit_url")
self.horizontalLayout_4.addWidget(self.lineEdit_url)
self.pushButton_download = QtWidgets.QPushButton(Form_main)
self.pushButton_download.setGeometry(QtCore.QRect(510, 360, 93, 28))
self.pushButton_download.setObjectName("pushButton_download")
self.pushButton_advanced = QtWidgets.QPushButton(Form_main)
self.pushButton_advanced.setGeometry(QtCore.QRect(410, 360, 93, 28))
self.pushButton_advanced.setObjectName("pushButton_advanced")
self.pushButton_about = QtWidgets.QPushButton(Form_main)
self.pushButton_about.setEnabled(True)
self.pushButton_about.setGeometry(QtCore.QRect(30, 360, 93, 28))
self.pushButton_about.setObjectName("pushButton_about")
self.groupBox_4 = QtWidgets.QGroupBox(Form_main)
self.groupBox_4.setGeometry(QtCore.QRect(800, 180, 181, 111))
self.groupBox_4.setObjectName("groupBox_4")
self.verticalLayoutWidget_10 = QtWidgets.QWidget(self.groupBox_4)
self.verticalLayoutWidget_10.setGeometry(QtCore.QRect(10, 20, 160, 80))
self.verticalLayoutWidget_10.setObjectName("verticalLayoutWidget_10")
self.verticalLayout_10 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_10)
self.verticalLayout_10.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.checkBox_mp4box = QtWidgets.QCheckBox(self.verticalLayoutWidget_10)
self.checkBox_mp4box.setObjectName("checkBox_mp4box")
self.verticalLayout_10.addWidget(self.checkBox_mp4box)
self.checkBox_mp4box_path = QtWidgets.QCheckBox(self.verticalLayoutWidget_10)
self.checkBox_mp4box_path.setObjectName("checkBox_mp4box_path")
self.verticalLayout_10.addWidget(self.checkBox_mp4box_path)
self.lineEdit_mp4box_path = QtWidgets.QLineEdit(self.verticalLayoutWidget_10)
self.lineEdit_mp4box_path.setObjectName("lineEdit_mp4box_path")
self.verticalLayout_10.addWidget(self.lineEdit_mp4box_path)
self.groupBox_5 = QtWidgets.QGroupBox(Form_main)
self.groupBox_5.setGeometry(QtCore.QRect(1000, 20, 211, 141))
self.groupBox_5.setObjectName("groupBox_5")
self.verticalLayoutWidget_13 = QtWidgets.QWidget(self.groupBox_5)
self.verticalLayoutWidget_13.setGeometry(QtCore.QRect(10, 20, 187, 113))
self.verticalLayoutWidget_13.setObjectName("verticalLayoutWidget_13")
self.verticalLayout_13 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_13)
self.verticalLayout_13.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.verticalLayout_9 = QtWidgets.QVBoxLayout()
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.checkBox_token = QtWidgets.QCheckBox(self.verticalLayoutWidget_13)
self.checkBox_token.setObjectName("checkBox_token")
self.verticalLayout_9.addWidget(self.checkBox_token)
self.lineEdit_token = QtWidgets.QLineEdit(self.verticalLayoutWidget_13)
self.lineEdit_token.setPlaceholderText("")
self.lineEdit_token.setObjectName("lineEdit_token")
self.verticalLayout_9.addWidget(self.lineEdit_token)
self.verticalLayout_13.addLayout(self.verticalLayout_9)
self.verticalLayout_8 = QtWidgets.QVBoxLayout()
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.checkBox_c = QtWidgets.QCheckBox(self.verticalLayoutWidget_13)
self.checkBox_c.setObjectName("checkBox_c")
self.verticalLayout_8.addWidget(self.checkBox_c)
self.lineEdit_c = QtWidgets.QLineEdit(self.verticalLayoutWidget_13)
self.lineEdit_c.setPlaceholderText("")
self.lineEdit_c.setObjectName("lineEdit_c")
self.verticalLayout_8.addWidget(self.lineEdit_c)
self.verticalLayout_13.addLayout(self.verticalLayout_8)
self.groupBox_6 = QtWidgets.QGroupBox(Form_main)
self.groupBox_6.setGeometry(QtCore.QRect(630, 20, 141, 141))
self.groupBox_6.setObjectName("groupBox_6")
self.verticalLayoutWidget_6 = QtWidgets.QWidget(self.groupBox_6)
self.verticalLayoutWidget_6.setGeometry(QtCore.QRect(10, 30, 121, 99))
self.verticalLayoutWidget_6.setObjectName("verticalLayoutWidget_6")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_6)
self.verticalLayout_6.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.checkBox_audio_only = QtWidgets.QCheckBox(self.verticalLayoutWidget_6)
self.checkBox_audio_only.setObjectName("checkBox_audio_only")
self.verticalLayout_6.addWidget(self.checkBox_audio_only)
self.checkBox_video_only = QtWidgets.QCheckBox(self.verticalLayoutWidget_6)
self.checkBox_video_only.setObjectName("checkBox_video_only")
self.verticalLayout_6.addWidget(self.checkBox_video_only)
self.checkBox_sub_only = QtWidgets.QCheckBox(self.verticalLayoutWidget_6)
self.checkBox_sub_only.setObjectName("checkBox_sub_only")
self.verticalLayout_6.addWidget(self.checkBox_sub_only)
self.checkBox_danmaku = QtWidgets.QCheckBox(self.verticalLayoutWidget_6)
self.checkBox_danmaku.setObjectName("checkBox_danmaku")
self.verticalLayout_6.addWidget(self.checkBox_danmaku)
self.groupBox_7 = QtWidgets.QGroupBox(Form_main)
self.groupBox_7.setGeometry(QtCore.QRect(1230, 20, 311, 271))
self.groupBox_7.setObjectName("groupBox_7")
self.verticalLayoutWidget_4 = QtWidgets.QWidget(self.groupBox_7)
self.verticalLayoutWidget_4.setGeometry(QtCore.QRect(10, 30, 291, 231))
self.verticalLayoutWidget_4.setObjectName("verticalLayoutWidget_4")
self.verticalLayout_14 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_4)
self.verticalLayout_14.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.checkBox_F = QtWidgets.QCheckBox(self.verticalLayoutWidget_4)
self.checkBox_F.setObjectName("checkBox_F")
self.verticalLayout.addWidget(self.checkBox_F)
self.lineEdit_F = QtWidgets.QLineEdit(self.verticalLayoutWidget_4)
self.lineEdit_F.setObjectName("lineEdit_F")
self.verticalLayout.addWidget(self.lineEdit_F)
self.verticalLayout_14.addLayout(self.verticalLayout)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.checkBox_M = QtWidgets.QCheckBox(self.verticalLayoutWidget_4)
self.checkBox_M.setObjectName("checkBox_M")
self.verticalLayout_2.addWidget(self.checkBox_M)
self.lineEdit_M = QtWidgets.QLineEdit(self.verticalLayoutWidget_4)
self.lineEdit_M.setObjectName("lineEdit_M")
self.verticalLayout_2.addWidget(self.lineEdit_M)
self.verticalLayout_14.addLayout(self.verticalLayout_2)
self.verticalLayout_18 = QtWidgets.QVBoxLayout()
self.verticalLayout_18.setObjectName("verticalLayout_18")
self.label_val = QtWidgets.QLabel(self.verticalLayoutWidget_4)
self.label_val.setObjectName("label_val")
self.verticalLayout_18.addWidget(self.label_val)
self.plainTextEdit = QtWidgets.QPlainTextEdit(self.verticalLayoutWidget_4)
self.plainTextEdit.setReadOnly(True)
self.plainTextEdit.setObjectName("plainTextEdit")
self.verticalLayout_18.addWidget(self.plainTextEdit)
self.verticalLayout_14.addLayout(self.verticalLayout_18)
self.groupBox_8 = QtWidgets.QGroupBox(Form_main)
self.groupBox_8.setGeometry(QtCore.QRect(620, 300, 191, 171))
self.groupBox_8.setObjectName("groupBox_8")
self.verticalLayoutWidget_12 = QtWidgets.QWidget(self.groupBox_8)
self.verticalLayoutWidget_12.setGeometry(QtCore.QRect(10, 20, 174, 141))
self.verticalLayoutWidget_12.setObjectName("verticalLayoutWidget_12")
self.verticalLayout_12 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_12)
self.verticalLayout_12.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.checkBox_p_show_all = QtWidgets.QCheckBox(self.verticalLayoutWidget_12)
self.checkBox_p_show_all.setObjectName("checkBox_p_show_all")
self.verticalLayout_12.addWidget(self.checkBox_p_show_all)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.checkBox_p = QtWidgets.QCheckBox(self.verticalLayoutWidget_12)
self.checkBox_p.setObjectName("checkBox_p")
self.verticalLayout_5.addWidget(self.checkBox_p)
self.lineEdit_p = QtWidgets.QLineEdit(self.verticalLayoutWidget_12)
self.lineEdit_p.setObjectName("lineEdit_p")
self.verticalLayout_5.addWidget(self.lineEdit_p)
self.verticalLayout_12.addLayout(self.verticalLayout_5)
self.verticalLayout_11 = QtWidgets.QVBoxLayout()
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.checkBox_p_delay = QtWidgets.QCheckBox(self.verticalLayoutWidget_12)
self.checkBox_p_delay.setObjectName("checkBox_p_delay")
self.verticalLayout_11.addWidget(self.checkBox_p_delay)
self.lineEdit_p_delay = QtWidgets.QLineEdit(self.verticalLayoutWidget_12)
self.lineEdit_p_delay.setObjectName("lineEdit_p_delay")
self.verticalLayout_11.addWidget(self.lineEdit_p_delay)
self.verticalLayout_12.addLayout(self.verticalLayout_11)
self.groupBox_9 = QtWidgets.QGroupBox(Form_main)
self.groupBox_9.setGeometry(QtCore.QRect(790, 20, 201, 141))
self.groupBox_9.setObjectName("groupBox_9")
self.verticalLayoutWidget_15 = QtWidgets.QWidget(self.groupBox_9)
self.verticalLayoutWidget_15.setGeometry(QtCore.QRect(10, 30, 181, 99))
self.verticalLayoutWidget_15.setObjectName("verticalLayoutWidget_15")
self.verticalLayout_15 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_15)
self.verticalLayout_15.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_15.setObjectName("verticalLayout_15")
self.checkBox_ia = QtWidgets.QCheckBox(self.verticalLayoutWidget_15)
self.checkBox_ia.setObjectName("checkBox_ia")
self.verticalLayout_15.addWidget(self.checkBox_ia)
self.checkBox_info = QtWidgets.QCheckBox(self.verticalLayoutWidget_15)
self.checkBox_info.setObjectName("checkBox_info")
self.verticalLayout_15.addWidget(self.checkBox_info)
self.checkBox_hs = QtWidgets.QCheckBox(self.verticalLayoutWidget_15)
self.checkBox_hs.setObjectName("checkBox_hs")
self.verticalLayout_15.addWidget(self.checkBox_hs)
self.checkBox_debug = QtWidgets.QCheckBox(self.verticalLayoutWidget_15)
self.checkBox_debug.setObjectName("checkBox_debug")
self.verticalLayout_15.addWidget(self.checkBox_debug)
self.groupBox_10 = QtWidgets.QGroupBox(Form_main)
self.groupBox_10.setGeometry(QtCore.QRect(630, 180, 151, 111))
self.groupBox_10.setObjectName("groupBox_10")
self.verticalLayoutWidget_16 = QtWidgets.QWidget(self.groupBox_10)
self.verticalLayoutWidget_16.setGeometry(QtCore.QRect(10, 20, 131, 80))
self.verticalLayoutWidget_16.setObjectName("verticalLayoutWidget_16")
self.verticalLayout_16 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_16)
self.verticalLayout_16.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.checkBox_skip_subtitle = QtWidgets.QCheckBox(self.verticalLayoutWidget_16)
self.checkBox_skip_subtitle.setObjectName("checkBox_skip_subtitle")
self.verticalLayout_16.addWidget(self.checkBox_skip_subtitle)
self.checkBox_skip_cover = QtWidgets.QCheckBox(self.verticalLayoutWidget_16)
self.checkBox_skip_cover.setObjectName("checkBox_skip_cover")
self.verticalLayout_16.addWidget(self.checkBox_skip_cover)
self.checkBox_skip_mux = QtWidgets.QCheckBox(self.verticalLayoutWidget_16)
self.checkBox_skip_mux.setObjectName("checkBox_skip_mux")
self.verticalLayout_16.addWidget(self.checkBox_skip_mux)
self.groupBox_11 = QtWidgets.QGroupBox(Form_main)
self.groupBox_11.setGeometry(QtCore.QRect(990, 180, 221, 111))
self.groupBox_11.setObjectName("groupBox_11")
self.verticalLayoutWidget_17 = QtWidgets.QWidget(self.groupBox_11)
self.verticalLayoutWidget_17.setGeometry(QtCore.QRect(10, 20, 198, 81))
self.verticalLayoutWidget_17.setObjectName("verticalLayoutWidget_17")
self.verticalLayout_17 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_17)
self.verticalLayout_17.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_17.setObjectName("verticalLayout_17")
self.checkBox_mt = QtWidgets.QCheckBox(self.verticalLayoutWidget_17)
self.checkBox_mt.setChecked(False)
self.checkBox_mt.setObjectName("checkBox_mt")
self.verticalLayout_17.addWidget(self.checkBox_mt)
self.checkBox_language = QtWidgets.QCheckBox(self.verticalLayoutWidget_17)
self.checkBox_language.setObjectName("checkBox_language")
self.verticalLayout_17.addWidget(self.checkBox_language)
self.lineEdit_language = QtWidgets.QLineEdit(self.verticalLayoutWidget_17)
self.lineEdit_language.setObjectName("lineEdit_language")
self.verticalLayout_17.addWidget(self.lineEdit_language)
self.horizontalLayoutWidget_5 = QtWidgets.QWidget(Form_main)
self.horizontalLayoutWidget_5.setGeometry(QtCore.QRect(30, 420, 571, 31))
self.horizontalLayoutWidget_5.setObjectName("horizontalLayoutWidget_5")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_5)
self.horizontalLayout_5.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.label_bbdown = QtWidgets.QLabel(self.horizontalLayoutWidget_5)
self.label_bbdown.setObjectName("label_bbdown")
self.horizontalLayout_5.addWidget(self.label_bbdown)
self.lineEdit_bbdown = QtWidgets.QLineEdit(self.horizontalLayoutWidget_5)
self.lineEdit_bbdown.setObjectName("lineEdit_bbdown")
self.horizontalLayout_5.addWidget(self.lineEdit_bbdown)
self.pushButton_bbdown = QtWidgets.QPushButton(self.horizontalLayoutWidget_5)
self.pushButton_bbdown.setObjectName("pushButton_bbdown")
self.horizontalLayout_5.addWidget(self.pushButton_bbdown)
self.horizontalLayoutWidget_2 = QtWidgets.QWidget(Form_main)
self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(220, 320, 381, 31))
self.horizontalLayoutWidget_2.setObjectName("horizontalLayoutWidget_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.radioButton_p_current = QtWidgets.QRadioButton(self.horizontalLayoutWidget_2)
self.radioButton_p_current.setChecked(True)
self.radioButton_p_current.setObjectName("radioButton_p_current")
self.horizontalLayout_2.addWidget(self.radioButton_p_current)
self.radioButton_p_all = QtWidgets.QRadioButton(self.horizontalLayoutWidget_2)
self.radioButton_p_all.setObjectName("radioButton_p_all")
self.horizontalLayout_2.addWidget(self.radioButton_p_all)
self.radioButton_p_new = QtWidgets.QRadioButton(self.horizontalLayoutWidget_2)
self.radioButton_p_new.setObjectName("radioButton_p_new")
self.horizontalLayout_2.addWidget(self.radioButton_p_new)
self.groupBox_14 = QtWidgets.QGroupBox(Form_main)
self.groupBox_14.setGeometry(QtCore.QRect(160, 20, 271, 141))
self.groupBox_14.setObjectName("groupBox_14")
self.radioButton_dfn_480P = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_480P.setGeometry(QtCore.QRect(140, 50, 120, 19))
self.radioButton_dfn_480P.setObjectName("radioButton_dfn_480P")
self.radioButton_dfn_more = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_more.setGeometry(QtCore.QRect(10, 100, 61, 19))
self.radioButton_dfn_more.setObjectName("radioButton_dfn_more")
self.radioButton_dfn_priority = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_priority.setGeometry(QtCore.QRect(10, 20, 249, 19))
self.radioButton_dfn_priority.setChecked(True)
self.radioButton_dfn_priority.setObjectName("radioButton_dfn_priority")
self.radioButton_dfn_1080P = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_1080P.setGeometry(QtCore.QRect(10, 50, 119, 19))
self.radioButton_dfn_1080P.setObjectName("radioButton_dfn_1080P")
self.radioButton_dfn_360P = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_360P.setGeometry(QtCore.QRect(140, 70, 120, 19))
self.radioButton_dfn_360P.setObjectName("radioButton_dfn_360P")
self.radioButton_dfn_720P = QtWidgets.QRadioButton(self.groupBox_14)
self.radioButton_dfn_720P.setGeometry(QtCore.QRect(10, 70, 119, 19))
self.radioButton_dfn_720P.setObjectName("radioButton_dfn_720P")
self.comboBox_dfn_more = QtWidgets.QComboBox(self.groupBox_14)
self.comboBox_dfn_more.setGeometry(QtCore.QRect(80, 100, 181, 22))
self.comboBox_dfn_more.setObjectName("comboBox_dfn_more")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.comboBox_dfn_more.addItem("")
self.groupBox_12 = QtWidgets.QGroupBox(Form_main)
self.groupBox_12.setGeometry(QtCore.QRect(840, 300, 211, 171))
self.groupBox_12.setObjectName("groupBox_12")
self.verticalLayoutWidget_7 = QtWidgets.QWidget(self.groupBox_12)
self.verticalLayoutWidget_7.setGeometry(QtCore.QRect(10, 20, 196, 141))
self.verticalLayoutWidget_7.setObjectName("verticalLayoutWidget_7")
self.verticalLayout_19 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_7)
self.verticalLayout_19.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_19.setObjectName("verticalLayout_19")
self.checkBox_use_aria2c = QtWidgets.QCheckBox(self.verticalLayoutWidget_7)
self.checkBox_use_aria2c.setObjectName("checkBox_use_aria2c")
self.verticalLayout_19.addWidget(self.checkBox_use_aria2c)
self.checkBox_aria2c_path = QtWidgets.QCheckBox(self.verticalLayoutWidget_7)
self.checkBox_aria2c_path.setObjectName("checkBox_aria2c_path")
self.verticalLayout_19.addWidget(self.checkBox_aria2c_path)
self.lineEdit_aria2c_path = QtWidgets.QLineEdit(self.verticalLayoutWidget_7)
self.lineEdit_aria2c_path.setText("")
self.lineEdit_aria2c_path.setReadOnly(False)
self.lineEdit_aria2c_path.setObjectName("lineEdit_aria2c_path")
self.verticalLayout_19.addWidget(self.lineEdit_aria2c_path)
self.checkBox_aria2c_proxy = QtWidgets.QCheckBox(self.verticalLayoutWidget_7)
self.checkBox_aria2c_proxy.setObjectName("checkBox_aria2c_proxy")
self.verticalLayout_19.addWidget(self.checkBox_aria2c_proxy)
self.lineEdit_aria2c_proxy = QtWidgets.QLineEdit(self.verticalLayoutWidget_7)
self.lineEdit_aria2c_proxy.setObjectName("lineEdit_aria2c_proxy")
self.verticalLayout_19.addWidget(self.lineEdit_aria2c_proxy)
self.retranslateUi(Form_main)
QtCore.QMetaObject.connectSlotsByName(Form_main)
def retranslateUi(self, Form_main):
_translate = QtCore.QCoreApplication.translate
Form_main.setWindowTitle(_translate("Form_main", "BBDown - GUI"))
self.checkBox_ffmpeg.setText(_translate("Form_main", "ffmpeg"))
self.pushButton_ffmpeg.setText(_translate("Form_main", "浏览"))
self.label_dir.setText(_translate("Form_main", "下载目录"))
self.pushButton_dir.setText(_translate("Form_main", "浏览"))
self.groupBox.setTitle(_translate("Form_main", "下载源"))
self.comboBox_source.setItemText(0, _translate("Form_main", "网页端"))
self.comboBox_source.setItemText(1, _translate("Form_main", "TV端"))
self.comboBox_source.setItemText(2, _translate("Form_main", "APP端"))
self.comboBox_source.setItemText(3, _translate("Form_main", "国际端"))
self.groupBox_2.setTitle(_translate("Form_main", "下载视频编码格式"))
self.comboBox_encoding.setItemText(0, _translate("Form_main", "优先可用编码"))
self.comboBox_encoding.setItemText(1, _translate("Form_main", "AVC"))
self.comboBox_encoding.setItemText(2, _translate("Form_main", "AV1"))
self.comboBox_encoding.setItemText(3, _translate("Form_main", "HEVC"))
self.groupBox_3.setTitle(_translate("Form_main", "账户"))
self.pushButton_login.setText(_translate("Form_main", "登录"))
self.pushButton_logintv.setText(_translate("Form_main", "登录(TV端)"))
self.label_url.setText(_translate("Form_main", "视频地址"))
self.lineEdit_url.setToolTip(_translate("Form_main", "<html><head/><body><p>视频地址 或 av bv BV ep ss</p></body></html>"))
self.lineEdit_url.setWhatsThis(_translate("Form_main", "<html><head/><body><p><br/></p></body></html>"))
self.pushButton_download.setText(_translate("Form_main", "下载"))
self.pushButton_advanced.setText(_translate("Form_main", "高级选项>"))
self.pushButton_about.setText(_translate("Form_main", "关于"))
self.groupBox_4.setTitle(_translate("Form_main", "MP4Box"))
self.checkBox_mp4box.setText(_translate("Form_main", "使用MP4Box来混流"))
self.checkBox_mp4box_path.setText(_translate("Form_main", "设置MP4Box的路径"))
self.lineEdit_mp4box_path.setToolTip(_translate("Form_main", "<html><head/><body><p>mp4box的路径</p></body></html>"))
self.lineEdit_mp4box_path.setWhatsThis(_translate("Form_main", "<html><head/><body><p>路径不要包含空格</p></body></html>"))
self.groupBox_5.setTitle(_translate("Form_main", "cookies"))
self.checkBox_token.setToolTip(_translate("Form_main", "<html><head/><body><p>设置access_token用以下载TV/APP接口的会员内容</p></body></html>"))
self.checkBox_token.setText(_translate("Form_main", "单独设置access_token"))
self.checkBox_c.setToolTip(_translate("Form_main", "<html><head/><body><p>设置字符串cookie用以下载网页接口的会员内容</p></body></html>"))
self.checkBox_c.setText(_translate("Form_main", "单独设置cookie"))
self.groupBox_6.setTitle(_translate("Form_main", "下载选项"))
self.checkBox_audio_only.setText(_translate("Form_main", "仅下载音频"))
self.checkBox_video_only.setText(_translate("Form_main", "仅下载视频"))
self.checkBox_sub_only.setText(_translate("Form_main", "仅下载字幕"))
self.checkBox_danmaku.setText(_translate("Form_main", "下载弹幕"))
self.groupBox_7.setTitle(_translate("Form_main", "文件名选项"))
self.checkBox_F.setText(_translate("Form_main", "单分P"))
self.lineEdit_F.setPlaceholderText(_translate("Form_main", "<videoTitle>"))
self.checkBox_M.setText(_translate("Form_main", "多分P"))
self.lineEdit_M.setPlaceholderText(_translate("Form_main", "<videoTitle>/[P<pageNumberWithZero>]<pageTitle>"))
self.label_val.setText(_translate("Form_main", "内置变量"))
self.plainTextEdit.setPlainText(_translate("Form_main", "<videoTitle>: 视频主标题\n"
"<pageNumber>: 视频分P序号\n"
"<pageNumberWithZero>: 视频分P序号(前缀补零)\n"
"<pageTitle>: 视频分P标题\n"
"<aid>: 视频aid\n"
"<cid>: 视频cid\n"
"<dfn>: 视频清晰度\n"
"<res>: 视频分辨率\n"
"<fps>: 视频帧率\n"
"<videoCodecs>: 视频编码\n"
"<videoBandwidth>: 视频码率\n"
"<audioCodecs>: 音频编码\n"
"<audioBandwidth>: 音频码率"))
self.groupBox_8.setTitle(_translate("Form_main", "分P"))
self.checkBox_p_show_all.setText(_translate("Form_main", "展示所有分P标题"))
self.checkBox_p.setToolTip(_translate("Form_main", "<html><head/><body><p>选择指定分p或分p范围:(-p 8 或 -p 1,2 或 -p 3-5 或 -p ALL)</p></body></html>"))
self.checkBox_p.setText(_translate("Form_main", "指定下载分P"))
self.lineEdit_p.setToolTip(_translate("Form_main", "<html><head/><body><p>选择指定分p或分p范围:(-p 8 或 -p 1,2 或 -p 3-5 或 -p ALL)</p></body></html>"))
self.lineEdit_p.setPlaceholderText(_translate("Form_main", "如1,2或3-5或ALL或NEW"))
self.checkBox_p_delay.setToolTip(_translate("Form_main", "<html><head/><body><p>设置下载合集分P之间的下载间隔时间(单位: 秒, 默认无间隔)</p></body></html>"))
self.checkBox_p_delay.setText(_translate("Form_main", "分P下载时间间隔"))
self.lineEdit_p_delay.setPlaceholderText(_translate("Form_main", "0"))
self.groupBox_9.setTitle(_translate("Form_main", "交互选项"))
self.checkBox_ia.setText(_translate("Form_main", "交互式选择清晰度"))
self.checkBox_info.setText(_translate("Form_main", "仅解析而不进行下载"))
self.checkBox_hs.setText(_translate("Form_main", "不显示所有音视频流"))
self.checkBox_debug.setText(_translate("Form_main", "输出调试日志"))
self.groupBox_10.setTitle(_translate("Form_main", "跳过选项"))
self.checkBox_skip_subtitle.setText(_translate("Form_main", "跳过字幕下载"))
self.checkBox_skip_cover.setText(_translate("Form_main", "跳过封面下载"))
self.checkBox_skip_mux.setText(_translate("Form_main", "跳过混流步骤"))
self.groupBox_11.setTitle(_translate("Form_main", "其他"))
self.checkBox_mt.setText(_translate("Form_main", "使用多线程下载"))
self.checkBox_language.setToolTip(_translate("Form_main", "<html><head/><body><p>设置混流的音频语言(代码),如chi, jpn等</p></body></html>"))
self.checkBox_language.setText(_translate("Form_main", "设置混流的音频语言代码"))
self.lineEdit_language.setPlaceholderText(_translate("Form_main", "如chi,jpn"))
self.label_bbdown.setText(_translate("Form_main", " BBDown"))
self.pushButton_bbdown.setText(_translate("Form_main", "浏览"))
self.radioButton_p_current.setText(_translate("Form_main", "下载当前分P"))
self.radioButton_p_all.setText(_translate("Form_main", "下载全部分P"))
self.radioButton_p_new.setText(_translate("Form_main", "下载最新分P"))
self.groupBox_14.setTitle(_translate("Form_main", "下载视频画质"))
self.radioButton_dfn_480P.setText(_translate("Form_main", "480P 清晰"))
self.radioButton_dfn_more.setText(_translate("Form_main", "更多"))
self.radioButton_dfn_priority.setText(_translate("Form_main", "优先下载最高画质"))
self.radioButton_dfn_1080P.setText(_translate("Form_main", "1080P 高清"))
self.radioButton_dfn_360P.setText(_translate("Form_main", "360P 流畅"))
self.radioButton_dfn_720P.setText(_translate("Form_main", "720P 高清"))
self.comboBox_dfn_more.setItemText(0, _translate("Form_main", "优先下载最高画质"))
self.comboBox_dfn_more.setItemText(1, _translate("Form_main", "8K 超高清"))
self.comboBox_dfn_more.setItemText(2, _translate("Form_main", "杜比视界"))
self.comboBox_dfn_more.setItemText(3, _translate("Form_main", "HDR 真彩"))
self.comboBox_dfn_more.setItemText(4, _translate("Form_main", "4K 超清"))
self.comboBox_dfn_more.setItemText(5, _translate("Form_main", "1080P 高帧率"))
self.comboBox_dfn_more.setItemText(6, _translate("Form_main", "1080P 高码率"))
self.comboBox_dfn_more.setItemText(7, _translate("Form_main", "720P 高帧率"))
self.groupBox_12.setTitle(_translate("Form_main", "aria2c"))
self.checkBox_use_aria2c.setText(_translate("Form_main", "使用aria2c"))
self.checkBox_aria2c_path.setText(_translate("Form_main", "aria2c的路径"))
self.checkBox_aria2c_proxy.setToolTip(_translate("Form_main", "<html><head/><body><p>调用aria2c进行下载时的代理地址配置</p></body></html>"))
self.checkBox_aria2c_proxy.setText(_translate("Form_main", "aria2c的代理地址"))
self.lineEdit_aria2c_proxy.setToolTip(_translate("Form_main", "<html><head/><body><p>调用aria2c进行下载时的代理地址配置</p></body></html>"))
``` |
{
"source": "129Camal/SPLN-19",
"score": 3
} |
#### File: SPLN-19/Testes/aula.py
```python
from bs4 import BeautifulSoup as BS
import requests
import subprocess
from build_utils import build_profile
#
def getHTML(word):
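    # Fetch the Priberam dictionary page for the given word (via curl) and return it as a BeautifulSoup tree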
#
urlBase = "https://dicionario.priberam.org/"
#
composedURL = urlBase + word
#response = requests.get(composedURL).content
response = subprocess.check_output(['curl',composedURL])
#
    soup = BS(response, 'html.parser')  # name the parser explicitly to avoid bs4's GuessedAtParserWarning
return soup
#
def getFromSoup(soup):
# find elements using CSS selector
#resultados = soup.select('#resultados .def')
#
resultados = soup.find('div',id='resultados')
#
data = resultados.find_all('span','def')
#
return data
#
def elem_extr_func(original_word,data,result_objts):
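    # Collect the text of every definition span into result_objts['significados'][original_word]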
#
resultObjects = result_objts
#
defs = resultObjects['significados']
#
defs[original_word] = [word.text for word in data]
#
    # no return needed: the shared result dict was already mutated in place above
#
def build_priberam_profile(words):
#
obj = {'significados':{}}
#
priberam_profile = build_profile(words,getHTML,getFromSoup,obj,elem_extr_func)
#
return priberam_profile
#
words = ['banana','Ezequiel','João','balão']
priberam_profile = build_priberam_profile(words)
```
#### File: TP1/Enun_2/xmlStruct.py
```python
import xml.etree.ElementTree as ET
import sys
file = sys.argv[1]
tree = ET.parse(file)
root = tree.getroot()
count = 1
# iterate over the children of the root's children, printing their tags with indentation
def parseChilds(parent, count):
for child in parent:
print(('\t'* count)+'-->',child.tag)
        children = list(child)  # getchildren() was removed from ElementTree in Python 3.9
if(children):
parseChilds(children, count+1)
# print the root element's tag
print(root.tag)
# iterate over the root's children, printing their tags
for child in root:
print('-->', child.tag)
    children = list(child)  # getchildren() was removed from ElementTree in Python 3.9
if(children):
parseChilds(children, count)
```
#### File: SPLN-19/TP2/companyNLP.py
```python
import nltk
from owlready2 import *
# NLTK packages to download on first run
#nltk.download('punkt')  # needed by sent_tokenize/word_tokenize below
#nltk.download('stopwords')
#nltk.download('words')
#nltk.download('maxent_ne_chunker')
#nltk.download('averaged_perceptron_tagger')
# Open the ontology
ontology = get_ontology("http://spln.di.uminho.pt/ontology")
# Declare the Classes, ObjectProperties and DataProperties
with ontology:
class Company(Thing):
pass
class Product(Thing):
pass
class Location(Thing):
pass
class is_product_of(ObjectProperty):
domain = [Product]
range = [Company]
class is_location_of(ObjectProperty):
domain = [Location]
range = [Company]
class has_product(ObjectProperty):
domain = [Company]
range = [Product]
inverse_property = is_product_of
class has_location(ObjectProperty):
domain = [Company]
range = [Location]
inverse_property = is_location_of
class has_cost(DataProperty, FunctionalProperty):
domain = [Product]
range = [float]
# Open the file with the text
text = open("text.txt").read()
products = []
prices = []
my_company = ""
indexProduct = 1
# Function that attaches the collected products and prices to a given company in the ontology
def addProducts():
global indexProduct
j = 0
for product in products:
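        # Build a unique individual name like word1_word2_<index>; the last chunk leaf is
        # dropped because the chunk grammar below keeps a trailing IN/V tag in each Product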
aux = []
for i in range(len(product)-1):
aux.append(product[i][0])
aux.append(str(indexProduct))
x = "_".join(aux)
obj = Product(x)
obj.has_cost = float(prices[j][1][0])
my_company.has_product.append(obj)
indexProduct += 1
j += 1
del products[:]
del prices[:]
# Split into sentences
sentences = nltk.sent_tokenize(text)
# Walk through each sentence, looking for named entities or chunking to find things such as products and prices.
for sentence in sentences:
    # Split into words
words = nltk.word_tokenize(sentence)
    # Part-of-speech tags
tags = nltk.pos_tag(words)
    # Chunk grammar; a Company pattern {<NNP>+<NN>?} was tried and left commented out
chunkGram = r"""Product: {<NN.*>+<IN>}
{<NN.*>+<V.*>}
Price: {<\$><CD>}"""
chunkParser = nltk.RegexpParser(chunkGram)
tree = chunkParser.parse(tags)
    # Named entities
namedEnt = nltk.ne_chunk(tags)
for t in namedEnt.subtrees():
        # Find locations
if t.label() == "GPE":
location = t.leaves()
for i in range(len(location)):
location[i] = location[i][0]
local = "_".join(location)
my_company.has_location.append(Location(local))
        # Find companies
if t.label() == "ORGANIZATION" or t.label() == "PERSON":
company = (t.leaves())[0][0]
if(not ontology.company):
if(len(products)):
addProducts()
my_company = Company(company)
    # Process the price and product chunks that were found
flag = 0
for subtree in tree.subtrees():
if subtree.label() == "Price":
price = subtree.leaves()
flag = 1
if subtree.label() == "Product":
product = subtree.leaves()
if flag == 1:
products.append(product)
prices.append(price)
flag = 0
# Attach to the ontology the products and prices of the last company found
addProducts()
# Save the ontology
ontology.save(file = "onto.rdf", format = "rdfxml")
# print("Company: " + company)
# print("Products: " + str(products))
# print("Locations: " + str(locations))
# print("Company: " + str(company) + "\n\n\n")
# print("\nEmpresa: " + str(Company.instances()))
# print("Produtos: " + str(Product.instances()))
# for i in Product.instances():
# print("Custo do produto " + str(i) +": " + str(i.has_cost))
# print("Locais: " + str(Location.instances()))
# print("Localizações da Empresa: " + str(list(ontology.has_location.get_relations())))
# print("Produtos da Empresa: " + str(list(ontology.has_product.get_relations())))
```
#### File: SPLN-19/TP3/scrapping.py
```python
from bs4 import BeautifulSoup as BS
import requests
import sys
import re
import getopt
import os
#
# Function to get the meanings of a word for a given part of speech
#
def getMeaning(word, semantic_meaning):
urlBase = "https://www.lexico.com/en/definition/" + word
response = requests.get(urlBase).content
soup = BS(response, 'html.parser')
word_types = soup.findAll('section', 'gramb')
for word_type in word_types:
instance = word_type.find('h3', 'ps pos')
if(semantic_meaning != instance.span.text):
continue
ul = word_type.find('ul', 'semb')
lis = ul.findAll('li')
allmeaning = []
allsyn = []
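        # Each numbered sense sits in an <li>: the definition text lives in span.ind, synonyms in div.synonyms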
for li in lis:
div = li.find('div', 'trg')
if(div):
p = div.find('p')
if(p):
span = p.find('span', 'ind')
if(span):
allmeaning.append(span.text)
# Get Synonyms
sysn = div.find('div', 'synonyms')
if(sysn):
strong = sysn.find('strong', 'syn')
exg = sysn.find('span', 'syn')
if(not exg):
allsyn.append(strong.text)
else:
allsyn.append(strong.text + exg.text)
else:
allsyn.append("Doesn`t have synonyms")
if(len(allmeaning) > 0):
x = " || ".join(allmeaning)
print("\t\t\t<prop type=\"meaning\">" +
x + "</prop>")
if(len(allsyn) > 0):
x = " || ".join(allsyn)
print("\t\t\t<prop type=\"synonyms\">" +
x + "</prop>")
# -------------------------------- BEGIN ----------------------------------- #
# All languages available
languages = [('german', 'de'), ('french', 'fr'), ('spanish', 'es'),
('chinese', 'ch'), ('russian', 'ru'), ('portuguese', 'pt'), ('italian', 'it'), ('polish', 'pl')]
# See what words and what languages the user wants
opts, args = getopt.getopt(sys.argv[1:], '', ['to='])
# Guard against a missing --to option before indexing into opts
if len(opts) == 0:
    print("Please pass the target languages, e.g. --to=de,fr,pt")
    sys.exit()
# Filter the languages that the user wants
languages = list(filter(lambda x: x[1] in opts[0][1], languages))
# End the script if we have no languages or no words to translate
if(len(languages) <= 0 or len(args) <= 0):
    print("Please enter the correct inputs!")
    sys.exit()
notFound = 0
for userWord in args:
types = {}
for language in languages:
urlBase = "https://www.linguee.com/english-" + \
language[0] + "/search?query=" + userWord
response = requests.get(urlBase).content
soup = BS(response, 'html.parser')
dictionary = soup.find('div', id="dictionary")
if(not dictionary):
notFound = 1
print(userWord + ": we don't find any translation for that!")
break
exact = dictionary.find('div', 'exact')
lemma = exact.findAll('div', 'lemma')
for lem in lemma:
word = lem.find('span', 'tag_lemma')
tag_wordtype = word.find('span', 'tag_wordtype')
word_type = tag_wordtype.text
if(not types.get(word_type)):
types[word_type] = []
translation = lem.find('span', 'tag_trans')
types[word_type].append((language[1], translation.a.text))
f = open('english_' + userWord + '2all.tmx', 'w')
sys.stdout = f
print("<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
"<tmx version=\"1.4\">",
"\t<header adminlang=\"en\"",
"\t\tdatatype=\"tbx\"",
"\t\to-tmf=\"unknown\"",
"\t\tsegtype=\"block\"",
"\t\tsrclang=\"en\"/>",
"\t<body>",
sep="\n")
i = 0
for word_type in types.items():
print("\t\t<tu tuid=\"" + str(i+1) + "\">",
"\t\t\t<prop type=\"word_type\">" + word_type[0] +
"</prop>", sep="\n")
getMeaning(userWord, word_type[0])
print("\t\t\t<tuv xml:lang=\"en\">",
"\t\t\t\t<seg>" + userWord + "</seg>",
"\t\t\t</tuv>",
sep="\n")
for translation in word_type[1]:
print("\t\t\t<tuv xml:lang=\"" + translation[0] + "\">",
"\t\t\t\t<seg>" + translation[1] + "</seg>",
"\t\t\t</tuv>",
sep="\n")
i = i + 1
print("\t\t</tu>")
print("\t</body>",
"</tmx>",
sep="\n")
if(notFound == 1):
os.remove('english_' + userWord + '2all.tmx')
notFound = 0
``` |
{
"source": "12arsh12/webots-uav",
"score": 3
} |
#### File: webots-uav/supervisorController/supervisorController.py
```python
import numpy as np
from deepbots.supervisor.controllers.supervisor_emitter_receiver import SupervisorCSV
from PPOAgent import PPOAgent, Transition
from utilities import normalizeToRange
import time
class CartPoleSupervisor(SupervisorCSV):
def __init__(self):
super().__init__()
        self.observationSpace = 3 # The agent observes 3 values: its (x, y, z) position
self.actionSpace = 2 # The agent can perform 2 actions
self.robot = None
self.respawnRobot()
# self.poleEndpoint = self.supervisor.getFromDef("POLE_ENDPOINT")
self.messageReceived = None # Variable to save the messages received from the robot
self.episodeCount = 0 # Episode counter
self.episodeLimit = 100 # Max number of episodes allowed
self.stepsPerEpisode = 2000 # Max number of steps per episode
self.episodeScore = 0 # Score accumulated during an episode
self.episodeScoreList = [] # A list to save all the episode scores, used to check if task is solved
def respawnRobot(self):
if self.robot is not None:
# Despawn existing robot
self.robot.remove()
# Respawn robot in starting position and state
rootNode = self.supervisor.getRoot() # This gets the root of the scene tree
childrenField = rootNode.getField('children') # This gets a list of all the children, ie. objects of the scene
        childrenField.importMFNode(-1, "Robot2.wbo") # Load robot from file and insert it at the end of the children field
# Get the new robot and pole endpoint references
self.robot = self.supervisor.getFromDef("ROBOT2")
# self.poleEndpoint = self.supervisor.getFromDef("POLE_ENDPOINT")
def get_observations(self):
obs = self.robot.getPosition()
return obs
def get_reward(self, action=None):
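        # Reward is the negative absolute difference between the robot's height and the
        # target value 1, so hovering at height 1 maximizes the reward (at 0)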
height = self.robot.getPosition()[1]
# print(self.robot.getPosition())
diff = abs(1-height)
return -diff
def is_done(self):
return False
def solved(self):
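        # Solved once the last 100 episode scores average above 195 (a CartPole-style
        # threshold apparently kept from the example this UAV controller is adapted from)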
if len(self.episodeScoreList) > 100: # Over 100 trials thus far
if np.mean(self.episodeScoreList[-100:]) > 195.0: # Last 100 episodes' scores average value
return True
return False
def reset(self):
self.respawnRobot()
self.supervisor.simulationResetPhysics() # Reset the simulation physics to start over
self.messageReceived = None
return self.robot.getPosition()
def get_info(self):
return None
supervisor = CartPoleSupervisor()
agent = PPOAgent(supervisor.observationSpace, supervisor.actionSpace)
solved = False
# Run outer loop until the episodes limit is reached or the task is solved
while not solved and supervisor.episodeCount < supervisor.episodeLimit:
observation = supervisor.reset() # Reset robot and get starting observation
supervisor.episodeScore = 0
time.sleep(5)
for step in range(supervisor.stepsPerEpisode):
# In training mode the agent samples from the probability distribution, naturally implementing exploration
selectedAction, actionProb = agent.work(observation, type_="selectAction")
# print('action')
# Step the supervisor to get the current selectedAction's reward, the new observation and whether we reached
# the done condition
newObservation, reward, done, info = supervisor.step([selectedAction])
# Save the current state transition in agent's memory
trans = Transition(observation, selectedAction, actionProb, reward, newObservation)
agent.storeTransition(trans)
if done:
# Save the episode's score
supervisor.episodeScoreList.append(supervisor.episodeScore)
agent.trainStep(batchSize=step)
solved = supervisor.solved() # Check whether the task is solved
break
supervisor.episodeScore += reward # Accumulate episode reward
observation = newObservation # observation for next step is current step's newObservation
print("Episode #", supervisor.episodeCount, "score:", supervisor.episodeScore)
supervisor.episodeCount += 1 # Increment episode counter
if not solved:
print("Task is not solved, deploying agent for testing...")
else:
print("Task is solved, deploying agent for testing...")
observation = supervisor.reset()
while True:
selectedAction, actionProb = agent.work(observation, type_="selectActionMax")
observation, _, _, _ = supervisor.step([selectedAction])
``` |
{
"source": "12avnisharma/advanced-lane-detection",
"score": 2
} |
#### File: 12avnisharma/advanced-lane-detection/birdseye.py
```python
from helpers import show_dotted_image
import cv2
import numpy as np
class BirdsEye:
def __init__(self, source_points, dest_points, cam_matrix, distortion_coef):
self.spoints = source_points
self.dpoints = dest_points
self.src_points = np.array(source_points, np.float32)
self.dest_points = np.array(dest_points, np.float32)
self.cam_matrix = cam_matrix
self.dist_coef = distortion_coef
self.warp_matrix = cv2.getPerspectiveTransform(self.src_points, self.dest_points)
self.inv_warp_matrix = cv2.getPerspectiveTransform(self.dest_points, self.src_points)
def undistort(self, raw_image, show_dotted = False):
image = cv2.undistort(raw_image, self.cam_matrix, self.dist_coef, None, self.cam_matrix)
if show_dotted:
show_dotted_image(image, self.spoints)
return image
def sky_view(self, ground_image, show_dotted = False):
temp_image = self.undistort(ground_image, show_dotted = False)
shape = (temp_image.shape[1], temp_image.shape[0])
warp_image = cv2.warpPerspective(temp_image, self.warp_matrix, shape, flags = cv2.INTER_LINEAR)
if show_dotted:
show_dotted_image(warp_image, self.dpoints)
return warp_image
def project(self, ground_image, sky_lane, left_fit, right_fit, color = (0, 255, 0)):
z = np.zeros_like(sky_lane)
sky_lane = np.dstack((z, z, z))
kl, kr = left_fit, right_fit
h = sky_lane.shape[0]
ys = np.linspace(0, h - 1, h)
lxs = kl[0] * (ys**2) + kl[1]* ys + kl[2]
rxs = kr[0] * (ys**2) + kr[1]* ys + kr[2]
pts_left = np.array([np.transpose(np.vstack([lxs, ys]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([rxs, ys])))])
pts = np.hstack((pts_left, pts_right))
cv2.fillPoly(sky_lane, np.int_(pts), color)
shape = (sky_lane.shape[1], sky_lane.shape[0])
ground_lane = cv2.warpPerspective(sky_lane, self.inv_warp_matrix, shape)
result = cv2.addWeighted(ground_image, 1, ground_lane, 0.3, 0)
return result
```
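A minimal usage sketch for the class above (not from the repository — the calibration matrix, distortion coefficients, and trapezoid points below are placeholder values; real ones would come from a `cv2.calibrateCamera` run):
```python
import cv2
import numpy as np
# Placeholder calibration for a 1280x720 camera; real values come from calibration
cam_matrix = np.array([[1000.0, 0.0, 640.0], [0.0, 1000.0, 360.0], [0.0, 0.0, 1.0]])
distortion_coef = np.zeros(5)
# Illustrative road trapezoid (source) mapped to a rectangle (destination)
source_points = [(580, 460), (700, 460), (1040, 680), (260, 680)]
dest_points = [(260, 0), (1040, 0), (1040, 720), (260, 720)]
birds_eye = BirdsEye(source_points, dest_points, cam_matrix, distortion_coef)
frame = cv2.imread("road.jpg")      # any forward-facing road frame
warped = birds_eye.sky_view(frame)  # undistorted, top-down view of the lane area
```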
#### File: 12avnisharma/advanced-lane-detection/lanefilter.py
```python
import cv2
import numpy as np
from helpers import roi, scale_abs
class LaneFilter:
def __init__(self, p):
self.sat_thresh = p['sat_thresh']
self.light_thresh = p['light_thresh']
self.light_thresh_agr = p['light_thresh_agr']
self.grad_min, self.grad_max = p['grad_thresh']
self.mag_thresh, self.x_thresh = p['mag_thresh'], p['x_thresh']
self.hls, self.l, self.s, self.z = None, None, None, None
self.color_cond1, self.color_cond2 = None, None
self.sobel_cond1, self.sobel_cond2, self.sobel_cond3 = None, None, None
def sobel_breakdown(self, img):
self.apply(img)
b1, b2, b3 = self.z.copy(), self.z.copy(), self.z.copy()
b1[(self.sobel_cond1)] = 255
b2[(self.sobel_cond2)] = 255
b3[(self.sobel_cond3)] = 255
return np.dstack((b1, b2,b3))
def color_breakdown(self, img):
self.apply(img)
b1, b2 = self.z.copy(), self.z.copy()
b1[(self.color_cond1)] = 255
b2[(self.color_cond2)] = 255
return np.dstack((b1, b2, self.z))
def apply(self, rgb_image):
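        # Convert to HLS, build colour and gradient masks from the L and S channels,
        # then OR them into one binary lane mask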
self.hls = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2HLS)
self.l = self.hls[:, :, 1]
self.s = self.hls[:, :, 2]
self.z = np.zeros_like(self.s)
color_img = self.apply_color_mask()
sobel_img = self.apply_sobel_mask()
filtered_img = cv2.bitwise_or(sobel_img, color_img)
return filtered_img
def apply_color_mask(self):
self.color_cond1 = (self.s > self.sat_thresh) & (self.l > self.light_thresh)
self.color_cond2 = self.l > self.light_thresh_agr
b = self.z.copy()
b[(self.color_cond1 | self.color_cond2)] = 1
return b
def apply_sobel_mask(self):
lx = cv2.Sobel(self.l, cv2.CV_64F, 1, 0, ksize = 5)
ly = cv2.Sobel(self.l, cv2.CV_64F, 0, 1, ksize = 5)
gradl = np.arctan2(np.absolute(ly), np.absolute(lx))
l_mag = np.sqrt(lx**2 + ly**2)
slm, slx, sly = scale_abs(l_mag), scale_abs(lx), scale_abs(ly)
b = self.z.copy()
self.sobel_cond1 = slm > self.mag_thresh
self.sobel_cond2 = slx > self.x_thresh
self.sobel_cond3 = (gradl > self.grad_min) & (gradl < self.grad_max)
b[(self.sobel_cond1 & self.sobel_cond2 & self.sobel_cond3)] = 1
return b
``` |
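In the same hedged spirit, a sketch of driving `LaneFilter`; the threshold values are illustrative guesses, not the project's tuned parameters:
```python
import cv2
# Hypothetical thresholds matching the keys read in LaneFilter.__init__
params = {
    'sat_thresh': 120, 'light_thresh': 40, 'light_thresh_agr': 205,
    'grad_thresh': (0.7, 1.4), 'mag_thresh': 40, 'x_thresh': 20,
}
lane_filter = LaneFilter(params)
rgb = cv2.cvtColor(cv2.imread("road.jpg"), cv2.COLOR_BGR2RGB)
binary_mask = lane_filter.apply(rgb)  # 1 where a pixel looks like lane paint
```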
{
"source": "12DEP/hik",
"score": 2
} |
#### File: components/stream/recorder.py
```python
from __future__ import annotations
from collections import deque
from io import BytesIO
import logging
import os
import threading
import av
from av.container import OutputContainer
from homeassistant.core import HomeAssistant, callback
from .const import RECORDER_CONTAINER_FORMAT, SEGMENT_CONTAINER_FORMAT
from .core import PROVIDERS, IdleTimer, Segment, StreamOutput
_LOGGER = logging.getLogger(__name__)
@callback
def async_setup_recorder(hass):
"""Only here so Provider Registry works."""
def recorder_save_worker(file_out: str, segments: deque[Segment]):
"""Handle saving stream."""
if not segments:
_LOGGER.error("Recording failed to capture anything")
return
if not os.path.exists(os.path.dirname(file_out)):
os.makedirs(os.path.dirname(file_out), exist_ok=True)
pts_adjuster: dict[str, int | None] = {"video": None, "audio": None}
output: OutputContainer | None = None
output_v = None
output_a = None
last_stream_id = None
# The running duration of processed segments. Note that this is in av.time_base
# units which seem to be defined inversely to how stream time_bases are defined
running_duration = 0
last_sequence = float("-inf")
for segment in segments:
# Because the stream_worker is in a different thread from the record service,
# the lookback segments may still have some overlap with the recorder segments
if segment.sequence <= last_sequence:
continue
last_sequence = segment.sequence
# Open segment
source = av.open(
BytesIO(segment.init + segment.moof_data),
"r",
format=SEGMENT_CONTAINER_FORMAT,
)
source_v = source.streams.video[0]
source_a = source.streams.audio[0] if len(source.streams.audio) > 0 else None
# Create output on first segment
if not output:
output = av.open(
file_out,
"w",
format=RECORDER_CONTAINER_FORMAT,
container_options={
"video_track_timescale": str(int(1 / source_v.time_base))
},
)
# Add output streams if necessary
if not output_v:
output_v = output.add_stream(template=source_v)
context = output_v.codec_context
context.flags |= "GLOBAL_HEADER"
if source_a and not output_a:
output_a = output.add_stream(template=source_a)
# Recalculate pts adjustments on first segment and on any discontinuity
# We are assuming time base is the same across all discontinuities
if last_stream_id != segment.stream_id:
last_stream_id = segment.stream_id
pts_adjuster["video"] = int(
(running_duration - source.start_time)
/ (av.time_base * source_v.time_base)
)
if source_a:
pts_adjuster["audio"] = int(
(running_duration - source.start_time)
/ (av.time_base * source_a.time_base)
)
# Remux video
for packet in source.demux():
if packet.dts is None:
continue
packet.pts += pts_adjuster[packet.stream.type]
packet.dts += pts_adjuster[packet.stream.type]
packet.stream = output_v if packet.stream.type == "video" else output_a
output.mux(packet)
running_duration += source.duration - source.start_time
source.close()
if output is not None:
output.close()
@PROVIDERS.register("recorder")
class RecorderOutput(StreamOutput):
"""Represents HLS Output formats."""
def __init__(self, hass: HomeAssistant, idle_timer: IdleTimer) -> None:
"""Initialize recorder output."""
super().__init__(hass, idle_timer)
self.video_path = None
@property
def name(self) -> str:
"""Return provider name."""
return "recorder"
def prepend(self, segments: list[Segment]) -> None:
"""Prepend segments to existing list."""
self._segments.extendleft(reversed(segments))
def cleanup(self):
"""Write recording and clean up."""
_LOGGER.debug("Starting recorder worker thread")
thread = threading.Thread(
name="recorder_save_worker",
target=recorder_save_worker,
args=(self.video_path, self._segments),
)
thread.start()
super().cleanup()
``` |
{
"source": "12DEP/qqqqqqqqqqqqqqqqqq",
"score": 2
} |
#### File: src/examples/read_alert_stream.py
```python
import asyncio
import os
from hikvision_isapi.isapi.client import ISAPIClient
from hikvision_isapi.isapi.model import EventNotificationAlert
eventbus = asyncio.Queue()
async def main(hik_client: ISAPIClient):
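    # Drain alert events from the shared queue and print a one-line summary for each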
while True:
event: EventNotificationAlert = await eventbus.get()
# if event_type == 'videoloss' and event_state == 'inactive':
# continue
print(
"Event: {} \tState: {}, \tChannel: {}/{}, \t Time: {}".format(
event.type, event.state, event.channel_id, event.channel_name, str(event.timestamp)
)
)
if __name__ == "__main__":
hik_client = ISAPIClient(os.environ.get("BASE_URL"), os.environ.get("USERNAME"), os.environ.get("PASSWORD"))
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.gather(main(hik_client), hik_client.listen_hikvision_event_stream(eventbus)))
```
#### File: hikvision_isapi/isapi/model.py
```python
from datetime import datetime
from typing import Any, Union
import xmltodict
class BaseHikvisionEntity(object):
XML_ROOT_ELEMENT: str = None
XML_ROOT_LIST_ELEMENT: str = None
XML_PARSE_ATTRS = False
TO_STRING_FIELDS = tuple()
def __init__(self) -> None:
self._xmldict = {}
def _from_field(self, field: str, _default: Any = None) -> Any:
return self._xmldict.get(field)
def _to_field(self, field: str, value: Any):
self._xmldict[field] = value
@classmethod
def from_xml_str(cls, xml_str: Union[str, bytes], resolve_root_array=True):
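        # Parse the XML, unwrap the list wrapper element (if the subclass declares one)
        # and then the root element; a list payload yields a list of entities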
parsed = xmltodict.parse(xml_str, xml_attribs=cls.XML_PARSE_ATTRS)
if cls.XML_ROOT_LIST_ELEMENT and cls.XML_ROOT_LIST_ELEMENT in parsed:
parsed = parsed.get(cls.XML_ROOT_LIST_ELEMENT)
if cls.XML_ROOT_ELEMENT:
parsed = parsed.get(cls.XML_ROOT_ELEMENT)
if isinstance(parsed, list):
return [cls.from_xml_dict(x) for x in parsed]
else:
return cls.from_xml_dict(parsed)
@classmethod
def from_xml_dict(cls, xml_dict: dict):
result = cls()
result._xmldict = xml_dict
return result
def __repr__(self):
if len(self.TO_STRING_FIELDS) == 0:
props = "obj=" + hex(id(self))
else:
props = ", ".join(["{}={}".format(f, self._from_field(f)) for f in self.TO_STRING_FIELDS])
return "{}({})".format(self.__class__.__name__, props)
class DeviceInfo(BaseHikvisionEntity):
XML_ROOT_ELEMENT = "DeviceInfo"
DEVICE_TYPE_NVR = "NVR"
FIELD_DEVICE_NAME = "deviceName"
FIELD_DEVICE_ID = "deviceID"
FIELD_MODEL = "model"
FIELD_SERIAL_NUMBER = "serialNumber"
FIELD_FIRMWARE_VERSION = "firmwareVersion"
FIELD_FIRMWARE_RELEASE_DATE = "firmwareReleaseDate"
FIELD_DEVICE_TYPE = "deviceType"
@property
def device_name(self) -> str:
return self._from_field(self.FIELD_DEVICE_NAME)
@device_name.setter
def device_name(self, value: str):
self._to_field(self.FIELD_DEVICE_NAME, value)
@property
def device_id(self) -> str:
return self._from_field(self.FIELD_DEVICE_ID)
@device_id.setter
def device_id(self, value: str):
self._to_field(self.FIELD_DEVICE_ID, value)
@property
def model(self) -> str:
return self._from_field(self.FIELD_MODEL)
@model.setter
def model(self, value: str):
self._to_field(self.FIELD_MODEL, value)
@property
def serial_number(self) -> str:
return self._from_field(self.FIELD_SERIAL_NUMBER)
@serial_number.setter
def serial_number(self, value: str):
self._to_field(self.FIELD_SERIAL_NUMBER, value)
@property
def device_type(self) -> str:
return self._from_field(self.FIELD_DEVICE_TYPE)
@device_type.setter
def device_type(self, value: str):
self._to_field(self.FIELD_DEVICE_TYPE, value)
def is_nvr(self) -> bool:
return self.device_type == self.DEVICE_TYPE_NVR
class InputChannel(BaseHikvisionEntity):
XML_ROOT_LIST_ELEMENT = "InputProxyChannelList"
XML_ROOT_ELEMENT = "InputProxyChannel"
FIELD_ID = "id"
FIELD_NAME = "name"
TO_STRING_FIELDS = (FIELD_ID, FIELD_NAME)
@property
def input_id(self) -> str:
return self._from_field(self.FIELD_ID)
@input_id.setter
def input_id(self, value: str):
self._to_field(self.FIELD_ID, value)
@property
def input_name(self) -> str:
return self._from_field(self.FIELD_NAME)
@input_name.setter
def input_name(self, value: str):
self._to_field(self.FIELD_NAME, value)
class EventNotificationAlert(BaseHikvisionEntity):
XML_ROOT_ELEMENT = "EventNotificationAlert"
FIELD_EVENT_TYPE = "eventType"
FIELD_EVENT_DESCRIPTION = "eventDescription"
FIELD_CHANNEL_NAME = "channelName"
FIELD_CHANNEL_ID = "channelID"
FIELD_EVENT_STATE = "eventState"
FIELD_EVENT_TIME = "dateTime"
@property
def type(self) -> str:
return self._from_field(self.FIELD_EVENT_TYPE)
@type.setter
def type(self, value: str):
self._to_field(self.FIELD_EVENT_TYPE, value)
@property
def description(self) -> str:
return self._from_field(self.FIELD_EVENT_DESCRIPTION)
@description.setter
def description(self, value: str):
self._to_field(self.FIELD_EVENT_DESCRIPTION, value)
@property
def channel_name(self) -> str:
return self._from_field(self.FIELD_CHANNEL_NAME)
@channel_name.setter
def channel_name(self, value: str):
self._to_field(self.FIELD_CHANNEL_NAME, value)
@property
def channel_id(self) -> str:
return self._from_field(self.FIELD_CHANNEL_ID)
@channel_id.setter
def channel_id(self, value: str):
self._to_field(self.FIELD_CHANNEL_ID, value)
@property
def state(self) -> str:
return self._from_field(self.FIELD_EVENT_STATE)
@state.setter
def state(self, value: str):
self._to_field(self.FIELD_EVENT_STATE, value)
@property
def timestamp(self) -> datetime:
str_val = self._from_field(self.FIELD_EVENT_TIME)
if str_val is None:
return None
return datetime.fromisoformat(self._from_field(self.FIELD_EVENT_TIME))
@timestamp.setter
def timestamp(self, value: datetime):
self._to_field(self.FIELD_EVENT_TIME, datetime.isoformat(value))
``` |
{
"source": "12DEP/wi",
"score": 2
} |
#### File: custom_components/googlewifi/binary_sensor.py
```python
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import ATTR_NAME
from homeassistant.helpers import entity_platform
from homeassistant.helpers.update_coordinator import UpdateFailed
from . import GoogleWifiEntity, GoogleWiFiUpdater
from .const import (
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SW_VERSION,
COORDINATOR,
DEFAULT_ICON,
DEV_MANUFACTURER,
DOMAIN,
)
SERVICE_RESET = "reset"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the binary sensor platforms."""
coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
entities = []
for system_id, system in coordinator.data.items():
entity = GoogleWifiBinarySensor(
coordinator=coordinator,
name=f"Google Wifi System {system_id}",
icon=DEFAULT_ICON,
system_id=system_id,
item_id=None,
)
entities.append(entity)
for ap_id, access_point in system["access_points"].items():
ap_name = "Google Access Point"
if access_point["accessPointSettings"].get("accessPointOtherSettings"):
if access_point["accessPointSettings"]["accessPointOtherSettings"].get(
"apName"
):
ap_name = access_point["accessPointSettings"][
"accessPointOtherSettings"
]["apName"]
if access_point["accessPointSettings"]["accessPointOtherSettings"].get(
"roomData"
):
if access_point["accessPointSettings"]["accessPointOtherSettings"][
"roomData"
].get("name"):
ap_name = f"{access_point['accessPointSettings']['accessPointOtherSettings']['roomData']['name']} Access Point"
entity = GoogleWifiBinarySensor(
coordinator=coordinator,
name=ap_name,
icon=DEFAULT_ICON,
system_id=system_id,
item_id=ap_id,
)
entities.append(entity)
async_add_entities(entities)
# register service for reset
platform = entity_platform.current_platform.get()
platform.async_register_entity_service(
SERVICE_RESET,
{},
"async_reset_device",
)
class GoogleWifiBinarySensor(GoogleWifiEntity, BinarySensorEntity):
"""Defines a Google WiFi sensor."""
def __init__(self, coordinator, name, icon, system_id, item_id):
"""Initialize the sensor."""
super().__init__(
coordinator=coordinator,
name=name,
icon=icon,
system_id=system_id,
item_id=item_id,
)
self._state = None
self._device_info = None
@property
def is_on(self) -> bool:
"""Return the on/off state of the sensor."""
try:
state = False
if self._item_id:
if (
self.coordinator.data[self._system_id]["access_points"][
self._item_id
]["status"]
== "AP_ONLINE"
):
state = True
else:
if self.coordinator.data[self._system_id]["status"] == "WAN_ONLINE":
state = True
self._state = state
        except (TypeError, KeyError):
            pass
return self._state
@property
def device_info(self):
"""Define the device as an individual Google WiFi system."""
try:
device_info = {
ATTR_MANUFACTURER: DEV_MANUFACTURER,
ATTR_NAME: self._name,
}
if self._item_id:
device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self._item_id)}
this_data = self.coordinator.data[self._system_id]["access_points"][
self._item_id
]
device_info[ATTR_MANUFACTURER] = this_data["accessPointProperties"][
"hardwareType"
]
device_info[ATTR_SW_VERSION] = this_data["accessPointProperties"][
"firmwareVersion"
]
device_info["via_device"] = (DOMAIN, self._system_id)
else:
device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self._system_id)}
device_info[ATTR_MODEL] = "Google Wifi"
device_info[ATTR_SW_VERSION] = self.coordinator.data[self._system_id][
"groupProperties"
]["otherProperties"]["firmwareVersion"]
self._device_info = device_info
        except (TypeError, KeyError):
            pass
return self._device_info
async def async_reset_device(self):
"""Reset the network or specific access point."""
if self._item_id:
success = await self.coordinator.api.restart_ap(self._item_id)
if success:
self.async_schedule_update_ha_state()
else:
raise ConnectionError("Failed to reset access point.")
else:
success = await self.coordinator.api.restart_system(self._system_id)
if success:
self.async_schedule_update_ha_state()
else:
raise ConnectionError("Failed to reset the Google Wifi system.")
```
#### File: custom_components/googlewifi/const.py
```python
from homeassistant.const import (
ATTR_NAME,
DATA_RATE_BITS_PER_SECOND,
DATA_RATE_BYTES_PER_SECOND,
DATA_RATE_GIGABITS_PER_SECOND,
DATA_RATE_GIGABYTES_PER_SECOND,
DATA_RATE_KILOBITS_PER_SECOND,
DATA_RATE_KILOBYTES_PER_SECOND,
DATA_RATE_MEGABITS_PER_SECOND,
DATA_RATE_MEGABYTES_PER_SECOND,
)
DOMAIN = "googlewifi"
COORDINATOR = "coordinator"
GOOGLEWIFI_API = "googlewifi_api"
ATTR_IDENTIFIERS = "identifiers"
ATTR_MANUFACTURER = "manufacturer"
ATTR_MODEL = "model"
ATTR_SW_VERSION = "sw_version"
ATTR_CONNECTIONS = "connections"
POLLING_INTERVAL = 30
REFRESH_TOKEN = "<PASSWORD>"
DEV_MANUFACTURER = "Google"
DEV_CLIENT_MODEL = "Connected Client"
DEFAULT_ICON = "mdi:wifi"
PAUSE_UPDATE = 15
ADD_DISABLED = "add_disabled"
CONF_SPEEDTEST = "auto_speedtest"
DEFAULT_SPEEDTEST = True
CONF_SPEEDTEST_INTERVAL = "speedtest_interval"
DEFAULT_SPEEDTEST_INTERVAL = 24
CONF_SPEED_UNITS = "speed_units"
SIGNAL_ADD_DEVICE = "googlewifi_add_device"
SIGNAL_DELETE_DEVICE = "googlewifi_delete_device"
def unit_convert(data_rate: float, unit_of_measurement: str):
"""Convert the speed based on unit of measure."""
if unit_of_measurement == DATA_RATE_BYTES_PER_SECOND:
data_rate *= 0.125
elif unit_of_measurement == DATA_RATE_KILOBYTES_PER_SECOND:
data_rate *= 0.000125
elif unit_of_measurement == DATA_RATE_MEGABYTES_PER_SECOND:
data_rate *= 1.25e-7
elif unit_of_measurement == DATA_RATE_GIGABYTES_PER_SECOND:
data_rate *= 1.25e-10
elif unit_of_measurement == DATA_RATE_KILOBITS_PER_SECOND:
data_rate *= 0.001
elif unit_of_measurement == DATA_RATE_MEGABITS_PER_SECOND:
data_rate *= 1e-6
elif unit_of_measurement == DATA_RATE_GIGABITS_PER_SECOND:
data_rate *= 1e-9
return round(data_rate, 2)
``` |
{
"source": "12doge-LEO/imiRecorder",
"score": 3
} |
#### File: imiRecorder/src/db_helper.py
```python
import pymongo
class dbConnector:
def __init__(self):
self.__db_name = "imiDB"
self.__user_name = "doge12"
self.__user_pwd = "<PASSWORD>"
self.__token = f"mongodb+srv://{self.__user_name}:{self.__user_pwd}@cluster0.mk62e.mongodb.net/<PASSWORD>?retryWrites=true&w=majority"
self.__client = pymongo.MongoClient(self.__token)
self.__collection = "imiRecords"
def _get_collection(self):
return self.__collection
def test(self):
print(self.__client[self._get_collection()])
def insert(self, data: dict):
        # Skip duplicates: only insert when no record with the same rid exists yet
        # (count_documents replaces Cursor.count(), which was removed in PyMongo 4)
        if self.__client[self._get_collection()]["db1"].count_documents({"rid": data["rid"]}) > 0:
            print("record exists")
            return False
        else:
            return self.__client[self._get_collection()]["db1"].insert_one(data)
imi_db_connector = dbConnector()
```
#### File: imiRecorder/src/recordDownloader.py
```python
import requests
import json, os, zipfile
import time
from typing import List, Dict
import matplotlib.pyplot as plt
class Record:
def __init__(self, name='', rid='', date='', urls=[], cover_url='', start_timestamp=0, end_timestamp=0):
self.name = name
self.date = date
self.rid = rid
self.urls = urls # type: List[str]
self.cover_url = cover_url
self.raw_dm = []
self.analyzed_dm = []
self.reference = {
'Referrer Policy': 'strict-origin-when-cross-origin'
}
self.headers = {
'User-Agent': 'Mozilla / 5.0(WindowsNT 10.0; Win64;x64) AppleWebKit / 537.36(KHTML, likeGecko) Chrome / '
'87.0.4280.141 Safari / 537.36 '
}
self.start_timestamp = start_timestamp
self.end_timestamp = end_timestamp
self.file_path = ''
self.TIME_STEP = 180
self.proxies = {
'http': 'http://' + '172.16.31.10:43664/',
'https': 'https://' + '172.16.31.10:43664/',
}
def to_dict(self):
record_dict = {"name": self.name, "date": self.date, "rid": self.rid, "urls": self.urls, "raw_dm": self.raw_dm,
"analyzed_dm": self.analyzed_dm, "start_timestamp": self.start_timestamp,
"end_timestamp": self.end_timestamp}
return record_dict
def download(self):
for url in self.urls:
file_name = './' + url.split('?')[0].split('/')[-1].replace(':', '-')
file_name = '-'.join(file_name.split('-')[1:])
record_video_name = self.name.replace(' ', '')
record_video_name = record_video_name.replace('|', '')
file_name = record_video_name + '-' + file_name
response = requests.get(url, headers=self.headers, params=self.reference)
with open(file_name, 'wb') as f:
f.write(response.content)
def draw_dm_time_map(self, temp_dir=''):
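        # Plot the danmaku (comment) count per TIME_STEP (180 s) bucket across the recording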
plt.figure(figsize=(15, 15))
max_index = int((self.end_timestamp - self.start_timestamp) / self.TIME_STEP)
x_data = [i for i in range(0, max_index)]
y_data = self.analyzed_dm[0:max_index]
plt.plot(x_data, y_data, label='count', linewidth=3, color='b', marker='o',
markerfacecolor='blue', markersize=5)
        # x-axis label
plt.xlabel('time')
        # y-axis label
plt.ylabel('count')
        # Attach a numeric (x, y) label to each data point
for a, b in zip(x_data, y_data):
plt.text(a, b, '({},{})'.format(a, b), ha='center', va='bottom', fontsize=15)
plt.savefig(temp_dir + '/{}'.format(self.name) + '_dm_figure.png')
plt.close()
def sava_dm_as_json(self, temp_dir):
with open(temp_dir + '/{}'.format(self.name) + '_dm.json', 'w'
) as result:
json.dump(self.raw_dm, result, ensure_ascii=False)
def save_analyzed_dm_data(self, temp_dir):
def get_dm_content_and_time(_dm_data):
_dm_content = [] # type: List[Dict]
for temp_dm in _dm_data:
_dm_content.append(
{'time': int(temp_dm['check_info']['ts'] / 1000), 'text': temp_dm['text']})
return _dm_content
time_dm_content = get_dm_content_and_time(self.raw_dm)
with open(temp_dir + '/{}'.format(self.name) + '_analyzed_dm.json', 'w'
) as result:
json.dump(time_dm_content, result, ensure_ascii=False)
def save_live_record_url(self, temp_dir):
with open(temp_dir + '/{}'.format(self.name) + '_record_urls.txt', 'w') as file:
for url in self.urls:
file.write(url + '\n')
def save_cover(self, temp_dir):
cover = requests.get(self.cover_url)
with open(temp_dir + '/{}'.format(self.name) + '_cover.png', 'wb') as file:
file.write(cover.content)
def daily_workflow(self):
temp_dir = '../resource/' + self.name + '_' + str(
time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(int(self.start_timestamp))))
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
self.file_path = temp_dir
else:
self.file_path = temp_dir
return False
def zipdir(path, ziph):
# ziph is zipfile handle
for root, dirs, files in os.walk(path):
for file in files:
ziph.write(os.path.join(root, file),
os.path.relpath(os.path.join(root, file), os.path.join(path, '..')))
try:
self.draw_dm_time_map(temp_dir)
self.sava_dm_as_json(temp_dir)
self.save_analyzed_dm_data(temp_dir)
self.save_live_record_url(temp_dir)
self.save_cover(temp_dir)
        except Exception:
            print('DM error found, please check')
file_path = self.file_path
zip_file_name = '../resource' + '/{}'.format(self.name) + '.zip'
zipf = zipfile.ZipFile(zip_file_name, 'w', zipfile.ZIP_DEFLATED)
zipdir(file_path, zipf)
zipf.close()
with open('../resource/list.txt', 'a+') as file:
file.write(self.rid + '\n')
return True
class RecordDownloader:
def __init__(self, max_count=100):
self.url = 'https://api.live.bilibili.com/xlive/web-room/v1/record/getLiveRecordUrl?'
self.live_room_url = 'https://api.live.bilibili.com/xlive/web-room/v1/record/getList?room_id=22605466&page=1' \
'&page_size=20 '
self.record_list = []
self.platform = 'html5'
self.reference = {
'Referrer Policy': 'strict-origin-when-cross-origin'
}
self.headers = {
'User-Agent': 'Mozilla / 5.0(WindowsNT 10.0; Win64;x64) AppleWebKit / 537.36(KHTML, likeGecko) Chrome / '
'87.0.4280.141 Safari / 537.36 '
}
self.TIME_STEP = 180
self.max_count = max_count
self.proxies = {
'http': 'http://' + '172.16.31.10:43664/',
'https': 'https://' + '172.16.31.10:43664/',
}
def get_recent_record_id(self):
self.live_room_url = 'https://api.live.bilibili.com/xlive/web-room/v1/record/getList?room_id=22605466&page=1' \
'&page_size=20'
response = requests.get(self.live_room_url, headers=self.headers, params=self.reference)
self.record_list = response.json()['data']['list']
def get_live_record_by_id(self, record_id):
record_request_url = self.url + 'rid={}'.format(record_id) + '&platform=' + self.platform
response = requests.get(record_request_url, params=self.reference)
record_video_urls = [item['url'] for item in response.json()['data']['list']]
return record_video_urls
def download_files(self, record_video_urls, record_video_name):
for url in record_video_urls:
file_name = './' + url.split('?')[0].split('/')[-1].replace(':', '-')
file_name = '-'.join(file_name.split('-')[1:])
record_video_name = record_video_name.replace(' ', '')
record_video_name = record_video_name.replace('|', '')
file_name = record_video_name + '-' + file_name
response = requests.get(url, headers=self.headers, params=self.reference)
with open(file_name, 'wb') as f:
f.write(response.content)
def get_dm_pool(self, rid, total_time):
dm_data = []
dm_url = 'https://api.live.bilibili.com/xlive/web-room/v1/dM/getDMMsgByPlayBackID'
for i in range(0, int(total_time / 3)):
# if i % 10 == 0 or i == int(total_time / 3) - 1:
# print('Getting DM index : {}/{}'.format(i, int(total_time / 3)))
params = {
'rid': rid,
'index': str(i),
'Connection': 'keep-alive'
}
response = requests.get(dm_url, headers=self.headers, params=params)
# time.sleep(1)
assert response.status_code == 200
try:
dm_data.extend(response.json()['data']['dm']['dm_info'])
            except Exception:
# print('Error: dm get failed')
continue
return dm_data
def create_record_instance(self):
record_rids = []
try:
with open('../resource/list.txt', 'r+') as file:
record_rids = file.read()
pass
        except Exception:
            print('List file does not exist yet, skipping')
temp_record_list = []
count = 0
for record in self.record_list:
if record['rid'] in record_rids:
count += 1
# print('{} is exist, continue'.format(record['rid']))
if count >= self.max_count:
break
continue
temp_record = Record(rid=record['rid'], urls=self.get_live_record_by_id(record['rid']),
name=record['title'], cover_url=record['cover'],
start_timestamp=record['start_timestamp'], end_timestamp=record['end_timestamp'])
print('Creating record instance: ' + str(temp_record.name))
temp_record.raw_dm = self.get_dm_pool(temp_record.rid,
(temp_record.end_timestamp - temp_record.start_timestamp) / 60)
# print('Begin DM analysis...')
temp_record.analyzed_dm = self.analyze_dm_by_timestamp(temp_record.raw_dm)
# print('DM instance created')
temp_record_list.append(temp_record)
count += 1
if count >= self.max_count:
break
return temp_record_list
def analyze_dm_by_timestamp(self, dm_data):
def get_dm_content_and_time(_dm_data):
_dm_content = [] # type: List[Dict]
for temp_dm in _dm_data:
_dm_content.append(
{'time': int(temp_dm['check_info']['ts'] / 1000), 'text': temp_dm['text']})
return _dm_content
time_dm_matrix = []
dm_content = get_dm_content_and_time(dm_data)
max_timestamps = 0
for item in dm_content:
if item['time'] > max_timestamps:
max_timestamps = item['time']
try:
for i in range(0, int((max_timestamps - dm_content[0]['time']) / self.TIME_STEP) + 1):
time_dm_matrix.append(0)
for item in dm_content:
time_dm_matrix[int((item['time'] - dm_content[0]['time']) / self.TIME_STEP)] += 1
        except Exception:
            print('DM data error, please check the raw DM data')
return time_dm_matrix
``` |
{
"source": "12DReflections/cab_trips",
"score": 3
} |
#### File: 12DReflections/cab_trips/app.py
```python
import datetime
import os
import sys
from flask import Flask, redirect, url_for, request, jsonify, make_response
from models import Medallions
from database import db_session
from flask_sqlalchemy import SQLAlchemy
from werkzeug.security import generate_password_hash, check_password_hash
from functools import wraps
from flask_caching import Cache
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.secret_key = os.environ['APP_SECRET_KEY']
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/medallion', methods=['POST'])
@cache.cached(timeout=50)
def medallionquery():
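    # Expects a JSON body like {"pickup_date": "yyyy-mm-dd", "medallions": "id1, id2"}
    # and returns per-medallion trip counts for that date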
data = request.get_json()
pickup_date = datetime.datetime.strptime(data['pickup_date'], '%Y-%m-%d').date() # yyyy-mm-dd
medal_req = data['medallions'] # comma separated string
medal_req = [x.strip() for x in medal_req.split(',')] # strip whitespace and turn medallions to a list
medallions = Medallions.query.all()
output = {}
for m in medallions:
        medallion = {}
        medallion['medallion'] = m.medallion
        medallion['pickup_date'] = m.pickup_datetime.date()
        if medallion['pickup_date'] == pickup_date and medallion['medallion'] in medal_req:  # if medallion and date match
            # then add to or increment the trip count for this medallion
            if medallion['medallion'] not in output:
                output[medallion['medallion']] = 1
            else:
                output[medallion['medallion']] += 1
output['response'] = 200
return jsonify(output)
@app.route('/clearcache')
def clearcache():
with app.app_context():
cache.clear()
return jsonify({'result' : "success, cache is cleared", "response": 200})
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5090, debug=True)
``` |
{
"source": "12DReflections/latitude_assessment",
"score": 3
} |
#### File: 12DReflections/latitude_assessment/file_reader.py
```python
import io
import json
import logging
import codecs
import re
class File_Reader:
'''Create the file reader class'''
'''The class reads in a spec file and an input file.
First the input file is converted into windows-1252 encoding,
then the file is decoded to a CSV file.'''
def __init__(self, spec_name='spec.json', input_file='text_input.txt'):
self.spec_filepath = spec_name
self.input_file = input_file
def run(self):
        # Use this instance's own spec path instead of constructing a second reader
        spec_data = self.input_spec(self.spec_filepath)
        # Currently outputs to fixed_frame.lat
fixed_frame = self.convert_to_fixed_width(spec_data, self.input_file)
print("Fixed frame file save location: " + fixed_frame)
csv_output = self.parser(spec_data, fixed_frame)
print("CSV Output save location: " + csv_output)
return 0
def input_spec(self, specfile):
try:
with open(specfile) as json_file:
spec_data = json.load(json_file)
except Exception as err:
            logging.error('Cannot load the spec')
            logging.error(err)
            raise  # spec_data would be undefined below, so re-raise rather than continue
# Convert to variables from unicode in the spec
offset_integers = []
for sub in spec_data['Offsets']:
offset_integers.append(int(sub))
spec_data['Offsets'] = offset_integers
spec_data['IncludeHeader'] = bool(spec_data['IncludeHeader'])
return spec_data
# A parser that can parse the fixed width file and generate a CSV
def parser(self, spec, fixed_frame_input_file="fixed_frame.lat"):
try:
f = io.open(fixed_frame_input_file, mode="r", encoding="cp1252")
fixed_frame = f.readlines()
output_csv = 'csv_output.csv'
# Strip whitespace and write to CSV
with open(output_csv,'w') as file:
for line in fixed_frame:
cleaned_string = ','.join([w.strip() for w in line.split(';')])
file.write(cleaned_string + '\n')
f.close()
except Exception as err:
logging.error(err)
return output_csv
def convert_to_fixed_width(self, spec_data, input_file):
# Generate fixed line file with delimeters showing frames
frame_offsets = self.get_frame_offsets(spec_data)
output_file = 'fixed_frame.lat'
with open(input_file, 'r') as f:
with codecs.open(output_file, 'wb', 'cp1252') as writer:
if spec_data['IncludeHeader']:
                # Build the fixed-width header row directly from the column names
                column_headers = self.header_splitter(spec_data['ColumnNames'], spec_data['Offsets'])
writer.write(column_headers)
# Apply the fixed line delimeters and write to encoded file
for i, line in enumerate(f.readlines()):
splitted_line = self.line_splitter(line, frame_offsets)
writer.write(splitted_line)
return output_file
def header_splitter(self, line, frame_offsets, delimiter = ';'):
# Split a line at the frame offsets
line_fixed = ''
for i, n in enumerate(frame_offsets):
# Build the line [TODO] switch to inline string one-liner
fixed_frames = ' ' * (int(frame_offsets[i]) - 2)
line_fixed += line[i]
line_fixed += fixed_frames + delimiter
return line_fixed
def line_splitter(self, line, frame_offsets, delimiter = ';'):
# Split a line at the frame offsets
for i, n in enumerate(frame_offsets):
if i == len(frame_offsets) - 1:
continue
line = re.sub(r"^(.{%d})()" % (n + i), r"\1%s" % delimiter, line)
return line
def get_frame_offsets(self, spec_data):
# Get the aggregate offsets to return the spacing
aggregate_offset = []
for i, width in enumerate(spec_data['Offsets']):
if i == 0:
aggregate_offset.append(int(width))
else:
distance_sum = width + aggregate_offset[i-1]
aggregate_offset.append(distance_sum)
return aggregate_offset
``` |
{
"source": "12DReflections/NatLangMachineLearning",
"score": 3
} |
#### File: NatLangMachineLearning/4_Text_Classification/single_classification.py
```python
def find_features(document_list, training_d_list):
article_words = set(document_list)
features = {}
for w in training_d_list:
features[w] = (w in article_words)
# a boolean dict of words from our list that are in an article
return features
training_data = "this sentence string format yellow orange purple green"
training_data_list = training_data.split()
sentence = "this is a sentence of words in a string format alfalfa"
sentence_list = sentence.split()
feat = find_features(sentence_list, training_data_list)
print feat
```
#### File: NatLangMachineLearning/4_Text_Classification/text_classification.py
```python
import nltk
import random
from nltk.corpus import movie_reviews
# Text Classification with nltk library
# Works if you have a body of a words and a boolean classification ie (positive, negative) as a tuple
documents = [(list(movie_reviews.words(fileid)), category)
for category in movie_reviews.categories()
for fileid in movie_reviews.fileids(category)]
# # Same as
# documents = []
# for category in movie_reviews.categories():
# for fileid in movie_reviews.fileids(category):
# documents.append(list(movie_reviews.words(fileid)), category)
# Train data
random.shuffle(documents)
# Get the full list of words
all_words = []
for w in movie_reviews.words():
all_words.append(w.lower())
# Convert list to frequency distribution and print most common words
all_words = nltk.FreqDist(all_words)
#print all_words.most_common(15)
# Train on the top 3000 words
word_features = list(all_words.keys())[:3000]
# training_data = "this sentence string format yellow orange purple green"
# word_features = training_data.split()
# A set removes doubled elements, only single representation
def find_features(document):
words = set(document)
features = {}
for w in word_features:
# (w in words) creates boolean value for each word, if word in document it will return true, else it will return false
features[w] = (w in words)
return features
# sentence = "this is a sentence of words in a string format alfalfa"
# feat = find_features(sentence)
# print feat
print((find_features(movie_reviews.words('neg/cv000_29416.txt'))))
featuresets = [(find_features(rev), category) for (rev, category) in documents]
print 'kitty'
```
#### File: NatLangMachineLearning/5_naive_bayes/bayes.py
```python
import nltk
import random
from nltk.corpus import movie_reviews
from nltk.tokenize import word_tokenize
# A naive bayes algorithm, finding the words which lead to a positive/negative classification of an article
# Text Classification with nltk library
# Works if you have a body of a words and a boolean classification ie (positive, negative) as a tuple
documents = [(list(movie_reviews.words(fileid)), category)
for category in movie_reviews.categories()
for fileid in movie_reviews.fileids(category)]
# # Same as
# documents = []
# for category in movie_reviews.categories():
# for fileid in movie_reviews.fileids(category):
# documents.append(list(movie_reviews.words(fileid)), category)
# Train data
random.shuffle(documents)
# Get the full list of words
all_words = []
for w in movie_reviews.words():
all_words.append(w.lower())
# Convert list to frequency distribution and print most common words
all_words = nltk.FreqDist(all_words)
#print all_words.most_common(15)
# Train on the top 3000 words
word_features = list(all_words.keys())[:3000]
# training_data = "this sentence string format yellow orange purple green"
# word_features = training_data.split()
# A set removes doubled elements, only single representation
def find_features(document):
words = word_tokenize(document)
print words
#print words
features = {}
for w in word_features:
# (w in words) creates boolean value for each word, if word in document it will return true, else it will return false
features[w] = (w in words)
print features
return features
# sentence = "this is a sentence of words in a string format alfalfa"
# feat = find_features(sentence)
# print feat
#print((find_features(movie_reviews.words('neg/cv000_29416.txt'))))
clas_sentence = 'refreshingly dismissed madsen'
clas = find_features(clas_sentence)
# The existence of words in a document, from our training words, and whether they lead to a positive/negative category
featuresets = [(find_features(rev), category) for (rev, category) in documents]
#print featuresets[1]
training_set = featuresets[:1900]
testing_set = featuresets[1900:]
# Naive bayes
# posterior = prior * likelihood / evidence
# Show whether a word is more likely to be in a positive or negative review
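# Illustrative sketch (made-up numbers, not from this corpus): with P(pos) = 0.5,
# P('refreshing'|pos) = 0.02 and P('refreshing'|neg) = 0.005, seeing 'refreshing'
# multiplies the posterior odds of 'pos' by 0.02 / 0.005 = 4.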
classifier = nltk.NaiveBayesClassifier.train(training_set)
print("Naive Bayes Algo accuracy percent: ", (nltk.classify.accuracy(classifier, testing_set)) * 100)
classifier.show_most_informative_features(15)
print(classifier.classify(clas))
#print(clas)
print('kitty')
``` |
{
"source": "12ds95/prototypical-networks",
"score": 3
} |
#### File: protonets/utils/visual.py
```python
from visdom import Visdom
import numpy as np
viz = Visdom()
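# Assumes a visdom server is already running (e.g. `python -m visdom.server`,
# which listens on http://localhost:8097 by default).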
def train_val_acc(title):
"""
"""
layout = dict(legend=['train', 'val'], title=title+" Acc Plot")
return plotTwoLine(layout)
def train_val_loss(title):
"""
"""
layout = dict(legend=['train', 'val'], title=title+" Loss Plot")
return plotTwoLine(layout)
def plotTwoLine(layout):
"""
"""
win = None
prevX = 0
prevYa = 0
prevYb = 0
def update_plotTwoLine(x, ya, yb):
def update():
nonlocal prevX, prevYa, prevYb, win
if win is None:
prevX = x
prevYa = ya
prevYb = yb
win = viz.line(
X=np.column_stack((np.linspace(prevX, x, 10), np.linspace(prevX, x, 10))),
Y=np.column_stack((np.linspace(prevYa, ya, 10),
np.linspace(prevYb, yb, 10))),
opts=layout
)
else:
viz.line(
X=np.column_stack((np.linspace(prevX, x, 10), np.linspace(prevX, x, 10))),
Y=np.column_stack((np.linspace(prevYa, ya, 10),
np.linspace(prevYb, yb, 10))),
win=win,
update='append'
)
update()
nonlocal prevX, prevYa, prevYb
prevX = x
prevYa = ya
prevYb = yb
return update_plotTwoLine
if __name__ == '__main__':
from time import sleep
pic = train_val_acc("First Plot")
pic(1, 0.5, 0.1)
sleep(1)
pic(2, 0.2, 0.3)
``` |
{
"source": "12f23eddde/PKUAutoElective",
"score": 2
} |
#### File: autoelective/captcha/classifier.py
```python
__all__ = ["KNN","SVM","RandomForest"]
import os
import re
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
import joblib # sklearn.externals.joblib and the private module paths were removed in scikit-learn 0.23+
from .feature import get_feature_extractor
from ..const import MODEL_DIR
from ..utils import Singleton
_regexModelFilename = re.compile(
pattern=(
r'^(?P<alg>\S+)\.model\.'
r'f(?P<feature>[1-5])\.'
r'(?:l(?P<level>\d{1})\.)?' # at most one optional level segment
r'c(?P<compress>\d{1})'
r'(?P<ext>\.z|\.gz|\.bz2|\.xz|\.lzma)$'
),
flags=re.I,
)
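# A (hypothetical) filename "SVM.model.f3.l2.c9.gz" would parse to
# alg="SVM", feature="3", level="2", compress="9", ext=".gz".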
def _get_MODEL_FILES():
model_files = {}
for file in os.listdir(MODEL_DIR):
res = _regexModelFilename.match(file)
if res is not None:
filename = res.group()
resDict = res.groupdict()
alg = resDict.pop("alg")
resDict["path"] = os.path.abspath(os.path.join(MODEL_DIR, filename))
model_files[alg] = resDict
return model_files
_MODEL_FILES = _get_MODEL_FILES()
class BaseClassifier(object, metaclass=Singleton):
ALG = ""
def __init__(self):
if self.__class__ is __class__:
raise NotImplementedError
clf, feature = self._load_model()
self._clf = clf
self.feature = feature
def _load_model(self):
alg = self.__class__.ALG
detail = _MODEL_FILES.get(alg)
if detail is None: # check before indexing, otherwise detail.__getitem__ raises AttributeError
raise FileNotFoundError("model %s.* is missing" % alg)
path, fCode, lCode = map(detail.__getitem__, ("path","feature","level"))
feature = get_feature_extractor(fCode, lCode)
return joblib.load(path), feature
def predict(self, Xlist):
return self._clf.predict(Xlist)
class RandomForest(BaseClassifier):
ALG = "RandomForest"
class KNN(BaseClassifier):
ALG = "KNN"
class SVM(BaseClassifier):
ALG = "SVM"
```
#### File: autoelective/captcha/feature.py
```python
__all__ = ["get_feature_extractor"]
from functools import partial
import numpy as np
def _feature1(img):
""" 遍历全部像素 """
ary = np.array(img.convert("1"))
ary = 1 - ary # invert
return ary.flatten()
def _feature2(img):
""" feature2 降维 """
ary = np.array(img.convert("1"))
ary = 1 - ary # invert
return np.concatenate([ary.sum(axis=0), ary.sum(axis=1)])
def _feature3(img, level):
""" 考虑临近像素的遍历 """
ary = np.array(img.convert("1"))
ary = 1 - ary # invert
l = level
featureVector = []
for i in range(l, ary.shape[0]-l):
for j in range(l, ary.shape[1]-l):
i1, i2, j1, j2 = i-l, i+l+1, j-l, j+l+1
featureVector.append(np.sum(ary[i1:i2, j1:j2])) # sum block
return np.array(featureVector)
def _feature4(img, level):
""" feature3 降维 """
ary = _feature3(img, level)
s = int(np.sqrt(ary.size))
assert s**2 == ary.size # ensure the vector reshapes to a square
ary.resize((s,s))
return np.concatenate([ary.sum(axis=0), ary.sum(axis=1)])
def _feature5(img, level):
""" feature3 改版,给接近中心的点增加权重
weight 矩阵例如:
array([[1, 1, 1, 1, 1],
[1, 2, 2, 2, 1],
[1, 2, 3, 2, 1],
[1, 2, 2, 2, 1],
[1, 1, 1, 1, 1]])
"""
ary = np.array(img.convert("1"))
ary = 1 - ary # invert
l = level
s = size = 2 * l + 1
weight = np.zeros(s**2, dtype=int).reshape((s,s)) # np.int was removed in NumPy 1.20+; plain int is equivalent
for k in range(l+1):
mask = np.array([ k <= i < s-k and k <= j < s-k for i in range(s) for j in range(s) ]).reshape((s,s))
weight[mask] += (k + 1)**2 # rings closer to the center accumulate quadratically larger weights
featureVector = []
for i in range(l, ary.shape[0]-l):
for j in range(l, ary.shape[1]-l):
i1, i2, j1, j2 = i-l, i+l+1, j-l, j+l+1
featureVector.append(np.sum(ary[i1:i2, j1:j2]*weight)) # sum block with weight
return np.array(featureVector)
_FEATURE_MAP = {
"1": _feature1,
"2": _feature2,
"3": _feature3,
"4": _feature4,
"5": _feature5,
}
def get_feature_extractor(feature, level=""):
feature = str(feature)
if feature in ("1","2"):
func = _FEATURE_MAP[feature]
elif feature in ("3","4","5"):
if not level: # covers "" and None (the model-filename regex may omit the level segment)
raise ValueError("level must be given for feature %s" % feature)
level = int(level)
if level <= 0:
raise ValueError("level must be a positive integer, not %s" % level)
func = partial(_FEATURE_MAP[feature], level=level)
else:
raise ValueError("unknown feature %s" % feature)
return func
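# Usage sketch (assuming `img` is a PIL image of one captcha character):
#   extractor = get_feature_extractor("3", level=2)
#   vec = extractor(img)  # 1-D numpy feature vector consumed by the classifiers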
```
#### File: PKUAutoElective/autoelective/loop.py
```python
__all__ = ["main"]
import sys
import time
import random
from queue import Queue, Empty
from collections import deque
from threading import Thread, Lock
from requests.compat import json
from requests.exceptions import RequestException
from . import __version__, __date__
from .iaaa import IAAAClient
from .elective import ElectiveClient
from .captcha import CaptchaRecognizer
from .course import Course
from .config import AutoElectiveConfig
from .parser import load_course_csv, get_tables, get_courses, get_courses_with_detail, get_sida
from .logger import ConsoleLogger, FileLogger
from .exceptions import *
cout = ConsoleLogger("loop")
ferr = FileLogger("loop.error") # sub-logger of loop; also echoed to the console
config = AutoElectiveConfig()
interval = config.refreshInterval
deviation = config.refreshRandomDeviation
isDualDegree = config.isDualDegree
identity = config.identity
page = config.supplyCancelPage
loginLoopInterval = config.loginLoopInterval
electivePoolSize = config.electiveClientPoolSize
config.check_identify(identity)
config.check_supply_cancel_page(page)
recognizer = CaptchaRecognizer()
electivePool = Queue(maxsize=electivePoolSize)
reloginPool = Queue(maxsize=electivePoolSize)
shouldKillAllThreads = False
class _ElectiveNeedsLogin(Exception):
pass
def _get_refresh_interval():
if deviation <= 0:
return interval
delta = (random.random() * 2 - 1) * deviation * interval
return interval + delta
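# e.g. interval = 5, deviation = 0.2 gives a uniform refresh interval in [4.0, 6.0) s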
def _has_candidates(goals, ignored):
_ignored = [ x[0] for x in ignored ]
count = 0
for course in goals:
if course in _ignored:
continue
count += 1
return count > 0
def _get_available_courses(goals, plans, elected, ignored):
queue = deque()
_ignored = [ x[0] for x in ignored ]
for c0 in goals:
if c0 in _ignored:
continue
for c in elected:
if c == c0:
cout.info("%s is elected, ignored" % c0)
ignored.append( (c0, "Elected") )
break
else:
for c in plans:
if c == c0:
if c.is_available():
queue.append(c)
cout.info("%s is AVAILABLE now !" % c)
break
else:
raise NotInCoursePlanException("%s is not in your course plan." % c0)
return queue
def _task_setup_pools():
for i in range(1, electivePoolSize+1):
electivePool.put_nowait(ElectiveClient(id=i))
def _task_print_header():
header = "# PKU Auto-Elective Tool v%s (%s) #" % (__version__, __date__)
line = "#" + "-"*(len(header) - 2) + "#"
cout.info(line)
cout.info(header)
cout.info(line)
cout.info("")
def _task_print_goals(goals, ignored):
""" 输出待选课程 """
if not _has_candidates(goals, ignored):
return
line = "-" * 30
cout.info("> Current tasks")
cout.info(line)
idx = 1
_ignored = [ x[0] for x in ignored ]
for course in goals:
if course in _ignored:
continue
cout.info("%02d. %s" % (idx, course))
idx += 1
cout.info(line)
cout.info("")
def _task_print_ignored(ignored):
""" 输出忽略列表 """
if len(ignored) == 0:
return
line = "-" * 30
cout.info("> Ignored tasks")
cout.info(line)
idx = 1
for course, reason in ignored:
cout.info("%02d. %s %s" % (idx, course, reason))
idx += 1
cout.info(line)
cout.info("")
def _task_validate_captcha(elective):
""" 填一次验证码 """
while True:
cout.info("Fetch a captcha")
r = elective.get_DrawServlet()
captcha = recognizer.recognize(r.content)
code = captcha.code
cout.info("Recognition result: %s" % code)
r = elective.get_Validate(code)
try:
res = r.json()["valid"] # the server may return an error page instead of JSON ...
except Exception as e:
ferr.error(e)
raise OperationFailedError(msg="Unable to validate captcha")
if res == "2":
cout.info("Validation passed!")
captcha.clear_cache()
cout.info("Clear captcha cache")
break
elif res == "0":
cout.info("Validation failed, try again")
else:
cout.warning("Unknown validation result: %s" % validRes)
def _thread_login_loop(status):
elective = None
shouldQuitImmediately = False
global shouldKillAllThreads
while True:
if shouldKillAllThreads:
break
shouldQuitImmediately = False
if elective is None:
elective = reloginPool.get()
try:
cout.info("Try to login IAAA (client: %s)" % elective.id)
iaaa = IAAAClient() # not reusable
r = iaaa.oauth_login()
try:
token = r.json()["token"]
except Exception as e:
ferr.error(e)
raise OperationFailedError(msg="Unable to parse IAAA token. response body: %s" % r.content)
elective.clear_cookies()
r = elective.sso_login(token)
if isDualDegree:
sida = get_sida(r)
sttp = identity
referer = r.url
_ = elective.sso_login_dual_degree(sida, sttp, referer)
cout.info("Login success (client: %s)" % elective.id)
electivePool.put_nowait(elective)
elective = None
except (ServerError, StatusCodeError) as e:
ferr.error(e)
cout.warning("ServerError/StatusCodeError encountered")
except OperationFailedError as e:
ferr.error(e)
cout.warning("OperationFailedError encountered")
except RequestException as e:
ferr.error(e)
cout.warning("RequestException encountered")
except IAAAException as e:
ferr.error(e)
cout.warning("IAAAException encountered")
except CaughtCheatingError as e:
ferr.critical(e) # fatal error
shouldQuitImmediately = True
raise e
except ElectiveException as e:
ferr.error(e)
cout.warning("ElectiveException encountered")
except json.JSONDecodeError as e:
ferr.error(e)
cout.warning("JSONDecodeError encountered")
except KeyboardInterrupt as e:
shouldQuitImmediately = True
except Exception as e:
ferr.exception(e)
shouldQuitImmediately = True
raise e
finally:
if shouldQuitImmediately:
shouldKillAllThreads = True
sys.exit(1)
t = loginLoopInterval
cout.info("")
cout.info("IAAA login loop sleep %s s" % t)
cout.info("")
time.sleep(t)
def _thread_main_loop(goals, ignored, status):
loop = 0
elective = None
shouldQuitImmediately = False
shouldEnterNextLoopImmediately = False
global shouldKillAllThreads
def _update_loop():
if status is not None:
status["loop"] = loop
def _ignore_course(course, reason):
ignored.append( (course.to_simplified(), reason) )
_task_setup_pools()
_task_print_header()
while True:
if shouldKillAllThreads:
break
shouldQuitImmediately = False
shouldEnterNextLoopImmediately = False
if not _has_candidates(goals, ignored):
cout.info("No tasks, exit")
break
loop += 1
_update_loop()
cout.info("")
cout.info("======== Loop %d ========" % loop)
cout.info("")
# MARK: print current plans
_task_print_goals(goals, ignored)
_task_print_ignored(ignored)
try:
if elective is None:
elective = electivePool.get()
cout.info("> Current client: %s" % elective.id)
cout.info("")
if not elective.hasLogined:
raise _ElectiveNeedsLogin # quit this loop
# MARK: check supply/cancel page
if page == 1:
cout.info("Get SupplyCancel page %s" % page)
resp = elective.get_SupplyCancel()
tables = get_tables(resp._tree)
elected = get_courses(tables[1])
plans = get_courses_with_detail(tables[0])
else:
#
# When refreshing a page other than page 1, the first request may return an empty page.
#
# To reproduce:
# 1. Log in as the dual-degree identity and open page 2 of supply/cancel
# 2. Log in as the major identity in the same browser
# 3. Refresh the dual-degree supply/cancel page 2 to observe it
#
# -----------------------------------------------
#
# Retry logic guards against endless retries caused by unusual conditions.
# One attempt normally succeeds; allow at most 3 attempts for sporadic errors.
#
retry = 3
while True:
if retry == 0:
raise OperationFailedError(msg="unable to get normal Supplement page %s" % page)
cout.info("Get Supplement page %s" % page)
resp = elective.get_supplement(page=page) # dual-degree page 2
tables = get_tables(resp._tree)
try:
elected = get_courses(tables[1])
plans = get_courses_with_detail(tables[0])
except IndexError as e:
cout.warning("IndexError encountered")
cout.info("Get SupplyCancel first to prevent empty table returned")
_ = elective.get_SupplyCancel() # on an empty page, request the SupplyCancel main page once; later refreshes then work
else:
break
finally:
retry -= 1
# MARK: check available courses
cout.info("Get available courses")
queue = _get_available_courses(goals, plans, elected, ignored)
# MARK: elect available courses
if len(queue) == 0:
cout.info("No courses available")
continue
while len(queue) > 0:
course = queue.popleft()
cout.info("Try to elect %s" % course)
_task_validate_captcha(elective)
retryRequired = True
while retryRequired:
retryRequired = False
try:
resp = elective.get_ElectSupplement(course.href)
except ElectionRepeatedError as e: # TimeConflictError has its own handler below
ferr.error(e)
cout.warning("ElectionRepeatedError encountered")
_ignore_course(course, reason="Repeated")
except TimeConflictError as e:
ferr.error(e)
cout.warning("TimeConflictError encountered")
_ignore_course(course, reason="Time conflict")
except ExamTimeConflictError as e:
ferr.error(e)
cout.warning("ExamTimeConflictError encountered")
_ignore_course(course, reason="Exam time conflict")
except ElectionPermissionError as e:
ferr.error(e)
cout.warning("ElectionPermissionError encountered")
_ignore_course(course, reason="Permission required")
except CreditsLimitedError as e:
ferr.error(e)
cout.warning("CreditsLimitedError encountered")
_ignore_course(course, reason="Credits limited")
except MutuallyExclusiveCourseError as e:
ferr.error(e)
cout.warning("MutuallyExclusiveCourseError encountered")
_ignore_course(course, reason="Mutual exclusive")
except ElectionSuccess as e:
cout.info("%s is ELECTED !" % course) # 不从此处加入 ignored ,而是在下回合根据实际选课结果来决定是否忽略
except ElectionFailedError as e:
ferr.error(e)
cout.warning("ElectionFailedError encountered") # 具体原因不明,且不能马上重试
except Exception as e:
raise e
except NotInCoursePlanException as e:
cout.error(e)
shouldQuitImmediately = True
raise e
except (ServerError, StatusCodeError) as e:
ferr.error(e)
cout.warning("ServerError/StatusCodeError encountered")
except OperationFailedError as e:
ferr.error(e)
cout.warning("OperationFailedError encountered")
except RequestException as e:
ferr.error(e)
cout.warning("RequestException encountered")
except IAAAException as e:
ferr.error(e)
cout.warning("IAAAException encountered")
except _ElectiveNeedsLogin as e:
cout.info("client: %s needs Login" % elective.id)
reloginPool.put_nowait(elective)
elective = None
shouldEnterNextLoopImmediately = True
except (SessionExpiredError, InvalidTokenError, NoAuthInfoError, SharedSessionError) as e:
ferr.error(e)
cout.info("client: %s needs relogin" % elective.id)
reloginPool.put_nowait(elective)
elective = None
shouldEnterNextLoopImmediately = True
except CaughtCheatingError as e:
ferr.critical(e) # fatal error
shouldQuitImmediately = True
raise e
except SystemException as e:
ferr.error(e)
cout.warning("SystemException encountered")
except TipsException as e:
ferr.error(e)
cout.warning("TipsException encountered")
except OperationTimeoutError as e:
ferr.error(e)
cout.warning("OperationTimeoutError encountered")
except json.JSONDecodeError as e:
ferr.error(e)
cout.warning("JSONDecodeError encountered")
except KeyboardInterrupt as e:
shouldQuitImmediately = True
raise e
except Exception as e:
ferr.exception(e)
shouldQuitImmediately = True
raise e
finally:
if shouldQuitImmediately:
shouldKillAllThreads = True
sys.exit(2)
if elective is not None: # change elective client
electivePool.put_nowait(elective)
elective = None
if shouldEnterNextLoopImmediately:
cout.info("")
cout.info("======== END Loop %d ========" % loop)
cout.info("")
else:
t = _get_refresh_interval()
cout.info("")
cout.info("======== END Loop %d ========" % loop)
cout.info("Main loop sleep %s s" % t)
cout.info("")
time.sleep(t)
def main(goals=None, ignored=None, status=None):
goals = load_course_csv() if goals is None else goals
ignored = [] if ignored is None else ignored # (course, reason)
tList = [
Thread(target=_thread_login_loop, name="Loop-Login", args=(status,)),
Thread(target=_thread_main_loop, name="Loop-Main", args=(goals, ignored, status))
]
for t in tList:
t.daemon = True
t.start()
for t in tList:
t.join()
```
#### File: PKUAutoElective/autoelective/monitor.py
```python
__all__ = ["main"]
import logging
import werkzeug._internal as _werkzeug_internal
from flask import Flask, current_app, jsonify
from flask.logging import default_handler
from .config import AutoElectiveConfig
from .logger import ConsoleLogger
cout = ConsoleLogger("monitor")
ferr = ConsoleLogger("monitor.error")
config = AutoElectiveConfig()
def main(goals, ignored, status):
monitor = Flask(__name__)
# MARK: register routes
@monitor.route("/", methods=["GET"])
@monitor.route("/rules", methods=["GET"])
def root():
rules = []
for r in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
line = "{method} {rule}".format(
method=','.join( m for m in r.methods if m not in ("HEAD","OPTIONS") ),
rule=r.rule
)
rules.append(line)
return jsonify(rules)
@monitor.route("/loop", methods=["GET"])
def loop():
return str(status["loop"])
@monitor.route("/goals", methods=["GET"])
def get_goals():
return jsonify([ str(course) for course in goals ])
@monitor.route("/current",methods=["GET"])
def get_current():
_ignored = [ x[0] for x in ignored ]
return jsonify([ str(course) for course in goals if course not in _ignored ])
@monitor.route("/ignored", methods=["GET"])
def get_ignored():
return jsonify([ "%s %s" % (course, reason) for (course, reason) in ignored ])
@monitor.route("/all", methods=["GET"])
def get_all():
_ignored = [ x[0] for x in ignored ]
return jsonify(
{
"loop": status["loop"],
"goals": [ str(course) for course in goals ],
"current": [ str(course) for course in goals if course not in _ignored ],
"ignored": [ "%s %s" % (course, reason) for (course, reason) in ignored ],
}
)
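# Usage sketch (host/port are whatever the config sets, e.g. hypothetically
# http://127.0.0.1:7074): GET /all returns the combined JSON above, while
# /loop, /goals, /current and /ignored expose the individual fields.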
# MARK: setup monitor
monitor.config["JSON_AS_ASCII"] = False
monitor.config["JSON_SORT_KEYS"] = False
_werkzeug_internal._logger = cout # custom _logger for werkzeug
monitor.logger.removeHandler(default_handler)
for logger in [cout, ferr]:
for handler in logger.handlers:
monitor.logger.addHandler(handler)
monitor.run(
host=config.monitorHost,
port=config.monitorPort,
debug=True,
use_reloader=False,
)
``` |
{
"source": "12f23eddde/tank2s",
"score": 2
} |
#### File: tank2s/geasrc/train.py
```python
import numpy as np
import geatpy as ga
import subprocess,time,sys,math
from multiprocessing import Pool,freeze_support
from tqdm import tqdm
# Debug flags
DEBUG = True
WRITE_TO_FILE = True
# Globals
GENERATION = 0
TOTAL = 0
BATTLECOUNTER = 0
# This program depends on Tank2Judge.exe.
# Tank2Judge.exe reads a phenotype matrix, e.g.
# 1 3 4 5 6 7 4 7 4 6 7 8 4 8
# and prints both sides' scores over ten games (loss 0, draw 1, win 2), e.g.
# 2 2
# modified
def sga_real_templet(AIM_M, AIM_F, PUN_M, PUN_F, FieldDR, problem, maxormin, MAXGEN, NIND, SUBPOP, GGAP, selectStyle, recombinStyle, recopt, pm, distribute, drawing = 1, _init = None, _read_from_file = False):
"""
sga_real_templet.py - 单目标编程模板(实值编码)
语法:
该函数除了参数drawing外,不设置可缺省参数。当某个参数需要缺省时,在调用函数时传入None即可。
比如当没有罚函数时,则在调用编程模板时将第3、4个参数设置为None即可,如:
sga_real_templet(AIM_M, 'aimfuc', None, None, ..., maxormin)
输入参数:
AIM_M - 目标函数的地址,由AIM_M = __import__('目标函数所在文件名')语句得到
目标函数规范定义:[f,LegV] = aimfuc(Phen,LegV)
其中Phen是种群的表现型矩阵, LegV为种群的可行性列向量,f为种群的目标函数值矩阵
AIM_F : str - 目标函数名
PUN_M - 罚函数的地址,由PUN_M = __import__('罚函数所在文件名')语句得到
罚函数规范定义: newFitnV = punishing(LegV, FitnV)
其中LegV为种群的可行性列向量, FitnV为种群个体适应度列向量
一般在罚函数中对LegV为0的个体进行适应度惩罚,返回修改后的适应度列向量newFitnV
PUN_F : str - 罚函数名
FieldDR : array - 实际值种群区域描述器
[lb; (float) 指明每个变量使用的下界
ub] (float) 指明每个变量使用的上界
注:不需要考虑是否包含变量的边界值。在crtfld中已经将是否包含边界值进行了处理
本函数生成的矩阵的元素值在FieldDR的[下界, 上界)之间
problem : str - 表明是整数问题还是实数问题,'I'表示是整数问题,'R'表示是实数问题
maxormin int - 最小最大化标记,1表示目标函数最小化;-1表示目标函数最大化
MAXGEN : int - 最大遗传代数
NIND : int - 种群规模,即种群中包含多少个个体
SUBPOP : int - 子种群数量,即对一个种群划分多少个子种群
GGAP : float - 代沟,表示子代与父代染色体及性状不相同的概率
selectStyle : str - 指代所采用的低级选择算子的名称,如'rws'(轮盘赌选择算子)
recombinStyle: str - 指代所采用的低级重组算子的名称,如'xovsp'(单点交叉)
recopt : float - 交叉概率
pm : float - 重组概率
distribute : bool - 是否增强种群的分布性(可能会造成收敛慢)
drawing : int - (可选参数),0表示不绘图,1表示绘制最终结果图。默认drawing为1
输出参数:
pop_trace : array - 种群进化记录器(进化追踪器),
第0列记录着各代种群最优个体的目标函数值
第1列记录着各代种群的适应度均值
第2列记录着各代种群最优个体的适应度值
var_trace : array - 变量记录器,记录着各代种群最优个体的变量值,每一列对应一个控制变量
times : float - 进化所用时间
模板使用注意:
1.本模板调用的目标函数形如:[ObjV,LegV] = aimfuc(Phen,LegV),
其中Phen表示种群的表现型矩阵, LegV为种群的可行性列向量(详见Geatpy数据结构)
2.本模板调用的罚函数形如: newFitnV = punishing(LegV, FitnV),
其中FitnV为用其他算法求得的适应度
若不符合上述规范,则请修改算法模板或自定义新算法模板
3.关于'maxormin': geatpy的内核函数全是遵循“最小化目标”的约定的,即目标函数值越小越好。
当需要优化最大化的目标时,需要设置'maxormin'为-1。
本算法模板是正确使用'maxormin'的典型范例,其具体用法如下:
当调用的函数传入参数包含与“目标函数值矩阵”有关的参数(如ObjV,ObjVSel,NDSetObjV等)时,
查看该函数的参考资料(可用'help'命令查看,也可到官网上查看相应的教程),
里面若要求传入前对参数乘上'maxormin',则需要乘上。
里面若要求对返回参数乘上'maxormin'进行还原,
则调用函数返回得到的相应参数需要乘上'maxormin'进行还原,否则其正负号就会被改变。
CHANGE:
1._init为参数初始值(可选):若init为None,则所有Chrom均随机生成,否则则将init添加至初始Chrom
2.修改了重插入部分
3.允许通过read_from_file保存种群
"""
"""==========================初始化配置==========================="""
# Fetch the objective and penalty functions
aimfuc = getattr(AIM_M, AIM_F) # objective function
if PUN_F is not None:
punishing = getattr(PUN_M, PUN_F) # penalty function
NVAR = FieldDR.shape[1] # number of decision variables
# Evolution tracker, initialized to NaN
pop_trace = (np.zeros((MAXGEN ,2)) * np.nan)
# Variable tracker for the best individual's variables, initialized to NaN
var_trace = (np.zeros((MAXGEN ,NVAR)) * np.nan)
ax = None # stores the previous plot frame
repnum = 0 # duplicate count of the best individual, initially 0
"""=========================开始遗传算法进化======================="""
if problem == 'R':
if _read_from_file is True:
Chrom = np.loadtxt('chrom')
elif _init is None:
Chrom = ga.crtrp(NIND, FieldDR) # generate the initial population
else:
Chrom = np.vstack([ga.crtrp(NIND-1, FieldDR),_init])
elif problem == 'I':
if _read_from_file is True:
Chrom = np.loadtxt('chrom')
elif _init is None:
Chrom = ga.crtip(NIND, FieldDR) # generate the initial population
else:
Chrom = np.vstack([ga.crtip(NIND-1, FieldDR),_init])
LegV = np.ones((NIND, 1)) # feasibility column vector of the population
[ObjV, LegV] = aimfuc(Chrom, LegV) # evaluate the population's objective values
gen = 0
badCounter = 0 # counts generations skipped under the "forgetting" strategy
# Start evolving!
start_time = time.time() # start timing
while gen < MAXGEN:
if badCounter >= 10 * MAXGEN: # bail out if no feasible solution appears even after 10x the generation budget
break # CHANGE
FitnV = ga.powing(maxormin * ObjV, LegV, None, SUBPOP) # CHANGE
if PUN_F is not None:
FitnV = punishing(LegV, FitnV) # apply the penalty function
# Record progress
bestIdx = np.argmax(FitnV) # index of the best individual
if LegV[bestIdx] != 0:
feasible = np.where(LegV != 0)[0] # keep feasible individuals only
pop_trace[gen,0] = np.sum(ObjV[feasible]) / ObjV[feasible].shape[0] # mean objective over feasible individuals
pop_trace[gen,1] = ObjV[bestIdx] # best objective this generation
var_trace[gen,:] = Chrom[bestIdx, :] # best individual's variables this generation
repnum = len(np.where(ObjV[np.argmax(FitnV)] == ObjV)[0]) # count duplicates of the best individual
# Live plot
if drawing == 2:
ax = ga.sgaplot(pop_trace[:,[1]],'population best objective', False, ax, gen)
badCounter = 0 # reset badCounter
if WRITE_TO_FILE is True:
np.savetxt('chrom',Chrom)
with open('temp.txt', 'a') as ftemp:
ftemp.write('>>> Generation '+ str(gen) +'\n')
ftemp.write('Best Value = ' + str(ObjV[bestIdx])+'\n')
ftemp.write('Best Chrom = ')
ftemp.writelines(np.array2string(Chrom[bestIdx, :], separator=','))
ftemp.write('\n')
ftemp.close()
else:
gen -= 1 # skip this generation ("forgetting" strategy)
badCounter += 1
if distribute == True: # enhance population diversity (may slow convergence)
idx = np.argsort(ObjV[:, 0], 0)
dis = np.diff(ObjV[idx,0]) / (np.max(ObjV[idx,0]) - np.min(ObjV[idx,0]) + 1)# distance offsets via differencing
dis = np.hstack([dis, dis[-1]])
dis = dis + np.min(dis) # shift offsets into absolute corrections
FitnV[idx, 0] *= np.exp(dis) # boost fitness of individuals far from their neighbors to increase diversity
# Apply genetic operators
SelCh=ga.selecting(selectStyle, Chrom, FitnV, GGAP, SUBPOP) # selection
SelCh=ga.recombin(recombinStyle, SelCh, recopt, SUBPOP) # recombine the selected individuals
if problem == 'R':
SelCh=ga.mutbga(SelCh,FieldDR, pm) # mutation
if repnum > Chrom.shape[0] * 0.01: # if best-individual duplicates exceed 1%, apply one Gaussian mutation
SelCh=ga.mutgau(SelCh, FieldDR, pm) # Gaussian mutation
elif problem == 'I':
SelCh=ga.mutint(SelCh, FieldDR, pm)
LegVSel = np.ones((SelCh.shape[0], 1)) # feasibility column vector of the offspring
# CHANGE
currCh = np.vstack([Chrom,SelCh])
currLegV = np.vstack([LegV,LegVSel])
[resObjV, resLegV] = aimfuc(currCh, currLegV) # evaluate parent + offspring populations together
[ObjV,ObjVSel] = np.split(resObjV,[len(ObjV)])
resFitnV = ga.ranking(maxormin * resObjV, resLegV, None, SUBPOP) # fitness of the combined population
[FitnV,FitnVSel] = np.split(resFitnV,[len(FitnV)])
if PUN_F is not None:
FitnVSel = punishing(LegVSel, FitnVSel) # apply the penalty function
# Reinsertion
[Chrom, ObjV, LegV] = ga.reins(Chrom, SelCh, SUBPOP, 1, 1, FitnV, FitnVSel, ObjV, ObjVSel, LegV, LegVSel)
gen += 1
if DEBUG is True:
for i in range(len(ObjV)):
print('aimfunc =', ObjV[i], end = ' ')
print('Chrom =', Chrom[i])
end_time = time.time() # stop timing
times = end_time - start_time
# Post-process the trackers: drop generations that never produced a feasible best
delIdx = np.where(np.isnan(pop_trace))[0]
pop_trace = np.delete(pop_trace, delIdx, 0)
var_trace = np.delete(var_trace, delIdx, 0)
if pop_trace.shape[0] == 0:
raise RuntimeError('error: no feasible solution (0 valid generations).')
# Plot
if drawing != 0:
ga.trcplot(pop_trace, [['population mean objective', 'population best objective']])
# Report results; search the whole trace, since rows were filtered above and
# indexing row MAXGEN-1 could fall out of range
if maxormin == 1:
best_gen = np.argmin(pop_trace[:, 1]) # generation holding the best (minimal) objective
best_ObjV = np.min(pop_trace[:, 1])
elif maxormin == -1:
best_gen = np.argmax(pop_trace[:, 1]) # generation holding the best (maximal) objective
best_ObjV = np.max(pop_trace[:, 1])
print('Best objective value: %s'%(best_ObjV))
print('Best decision variables:')
for i in range(NVAR):
print(var_trace[best_gen, i], end = ',')
print('Valid generations: %s'%(pop_trace.shape[0]))
print('The best generation is generation %s'%(best_gen+1))
print('Elapsed: %s seconds'%(times))
# Return the trackers and the elapsed time (ftemp is managed by its with-block above)
return [pop_trace, var_trace, times]
def battle(_para): # _para[0], _para[1] are single-row parameter vectors; returns both sides' scores over ten games (list)
args = ''
for it in _para[0]:
args += ' ' + str(it)
for it in _para[1]:
args += ' ' + str(it)
# if DEBUG is True:
# print("[DEBUG] Judge arguments =",args)
curr_input = args.encode('utf-8')
curr_process = subprocess.run('190522_Tank2S_Judge.exe', stdout=subprocess.PIPE, input=curr_input)
curr_output = curr_process.stdout.decode('utf-8') # bytes to string
if DEBUG is True:
print("[DEBUG] Judge output = ",curr_output)
curr_answ = curr_output.split()
res = [float(curr_answ[0]), float(curr_answ[1])]
return res
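# Usage sketch (hypothetical 6-parameter phenotypes for both sides):
#   scores = battle(([2, 1.2, 0.3, 2.0, 5, 0.5], [1, 1.0, 0.5, 3.0, 4, 0.8]))
#   # -> [score_A, score_B], each side's total over the judge's ten games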
def aimfunc(Phen,LegV): # objective value = average score over ten games
global GENERATION,TOTAL
row_count = Phen.shape[0]
print('Generation',GENERATION)
_pool = Pool()
_battle_list = []
for i in range(row_count-1): # round-robin pairing
for j in range(i+1,row_count):
_battle_list.append([Phen[i,:],Phen[j,:]])
# if(DEBUG):
# print('BattleList:', _battle_list)
_battle_res = list(tqdm(_pool.imap(battle,_battle_list),total = len(_battle_list),unit = 'battle'))
counter = 0
f = np.zeros((row_count,1))
for i in range(row_count-1): # round-robin pairing
for j in range(i+1,row_count):
f[i] += _battle_res[counter][0]
f[j] += _battle_res[counter][1]
counter+=1
f /= row_count
GENERATION += 1
print('BattleRes:',f.T)
return [f,LegV]
if __name__ == '__main__':
# For Windows Platform
freeze_support()
# Get the module holding the objective function
AIM_M = __import__('train')
# Training setup
parameter_count = 6 # number of parameters to train
init = [2,1.2,0.3,2.0,5,0.5] # initial parameter values
upper_bound = np.array([5,2.4,0.6,5,10,1.2])
lower_bound = np.array([0.5,0.6,0.1,1,2.5,0.2])
# GA hyperparameters
MAXGEN = 50 # maximum number of generations
NIND = 10 # population size (must be a multiple of SUBPOP)
SUBPOP = 1 # number of subpopulations
GGAP = 0.5 # generation gap (unused in this template)
selectStyle = 'etour' # low-level selection operator
recombinStyle = 'xovsp' # low-level recombination operator
recopt = 0.9 # crossover probability
pm = 0.1 # mutation probability
distribute = False # whether to enhance population diversity (may slow convergence)
# Build the variable descriptors
ranges = np.vstack([lower_bound*np.ones((1,parameter_count)), upper_bound*np.ones((1,parameter_count))]) # CHANGE
borders = np.vstack([np.zeros((1,parameter_count)), np.zeros((1,parameter_count))]) # bounds excluded
precisions = [3] * parameter_count # encoding precision for binary/Gray coding; for continuous variables crtfld treats it as boundary precision only, so any positive value works
# Build the region descriptor
FieldDR = ga.crtfld(ranges, borders, precisions)
if DEBUG is True:
print('FieldDR :', FieldDR)
# Real-valued encoding, maximize the objective
[pop_trace, var_trace, times] = sga_real_templet(AIM_M, 'aimfunc', None, None, FieldDR, 'R', -1, MAXGEN, NIND, SUBPOP, GGAP, selectStyle, recombinStyle, recopt, pm, distribute, 1, _init=init)
``` |
{
"source": "12HuYang/GridFree",
"score": 3
} |
#### File: 12HuYang/GridFree/axistest.py
```python
from tkinter import *
loccanvas=None
minx=0
maxx=0
totalbins=0
linelocs=[0,0]
bins=None
# def cal_xvalue(x):
# print(maxx,minx,max(bins),min(bins))
# binwidth=int(maxx-minx)/(max(bins)-min(bins))
# print(x,minx,binwidth)
# xloc=int((x-minx)/binwidth)
# print(xloc)
# #value=min(bins)+xloc*binwidth
# return xloc
#
#
#
# def item_enter(event):
# global loccanvas
# loccanvas.config(cursor='hand2')
# loccanvas._restorItem=None
# loccanvas._restoreOpts=None
# itemType=loccanvas.type(CURRENT)
# #print(itemType)
#
# pass
#
# def item_leave(event):
# global loccanvas
# pass
#
# def item_start_drag(event):
# global loccanvas,linelocs
# itemType=loccanvas.type(CURRENT)
# print(itemType)
# if itemType=='line':
# fill=loccanvas.itemconfigure(CURRENT,'fill')[4]
# if fill=='red':
# loccanvas._lastX=event.x
# #loccanvas._lastY=event.y
# linelocs[0]=event.x
# else:
# if fill=='orange':
# loccanvas._lastX=event.x
# #loccanvas._lastY=event.y
# linelocs[1]=event.x
# else:
# loccanvas._lastX=None
# else:
# loccanvas._lastX=None
# pass
#
# def item_drag(event):
# global loccanvas,linelocs
# x=event.x
# y=event.y
# if x<minx:
# x=minx
# if x>maxx:
# x=maxx
# try:
# fill=loccanvas.itemconfigure(CURRENT,'fill')[4]
# except:
# return
# #itemType=loccanvas.type(CURRENT)
# try:
# test=0-loccanvas._lastX
# except:
# return
# loccanvas.move(CURRENT,x-loccanvas._lastX,0)
# loccanvas._lastX=x
# if fill=='red':
# linelocs[0]=x
# if fill=='orange':
# linelocs[1]=x
# #print(line_a)
# #print(minline)
# #print(maxline)
# print(cal_xvalue(linelocs[0]),cal_xvalue(linelocs[1]))
#
# pass
def drawdots(ulx,uly,rlx,rly,x_bins,y_bins,datalist,canvas,inputfigdotlist):
global loccanvas,minx,maxx,totalbins,bins,linelocs
loccanvas=canvas
minx=ulx
maxx=rlx
canvas.create_text(int(ulx+(rlx-ulx)/2),rly-20,text='By size',font=('Times',14),anchor=N)
canvas.create_line(ulx,uly,rlx,uly,width=2)
canvas.create_line(ulx,uly,ulx,rly,width=2)
canvas.create_line(ulx,rly,rlx,rly,width=2)
canvas.create_line(rlx,uly,rlx,rly,width=2)
vlinelocs=[ulx,rlx]
hlinelocs=[rly,uly]
canvas.create_text(ulx-25-10,int(uly/2)+25,text='\n'.join('Shape'),font=('Times',12),anchor=E)
canvas.create_text(int(rlx/2)+50,uly+30,text='Pixels',font=('Times',12),anchor=N)
xbinwidth=(rlx-ulx-50)/(len(x_bins)-1)
for i in range(len(x_bins)):
x=ulx+(i*xbinwidth)
canvas.create_line(x+25,uly+5,x+25,uly,width=2)
canvas.create_text(x+25,uly+6,text='%d'%(x_bins[i]),font=('Times',12),anchor=N)
ybinwidth=(uly-rly-50)/(len(y_bins)-1)
for i in range(len(y_bins)):
y=uly-(i*ybinwidth)
canvas.create_line(ulx-5,y-25,ulx,y-25,width=2)
canvas.create_text(ulx-6,y-25,text='%d'%(y_bins[i]),font=('Times',12),anchor=E)
for (xs,ys) in datalist:
a=canvas.create_oval(xs-1,ys-1,xs+1,ys+1,width=1,outline='black',fill='SkyBlue')
inputfigdotlist.update({(xs,ys):a})
canvas.create_line(ulx+12,rly,ulx+12,uly,arrow=LAST,fill='red',width=2,dash=(5,1))
canvas.create_line(rlx-12,rly,rlx-12,uly,arrow=LAST,fill='red',width=2)
canvas.create_line(ulx,rly+12,rlx,rly+12,arrow=FIRST,fill='blue',width=2)
canvas.create_line(ulx,uly-12,rlx,uly-12,arrow=FIRST,fill='blue',width=2,dash=(5,1))
def drawPlot(ulx,uly,rlx,rly,hist,bin_edges,canvas):
global loccanvas,minx,maxx,totalbins,bins,linelocs
loccanvas=canvas
# The window is an object of type tk
#root = Tk()
#root.title('Simple Plot')
# A canvas object is something you can draw on
# we put it into the root window
#canvas = Canvas(root, width=400, height=300, bg = 'white')
# figures out how the canvas sits in the window
#canvas.pack()
# draw x and y axes
minx=ulx
maxx=rlx
linelocs=[minx,maxx]
totalbins=len(bin_edges)
bins=bin_edges
canvas.create_line(ulx,uly,rlx,uly, width=2)
canvas.create_line(ulx,uly,ulx,rly, width=2)
# markings on x axis
binwidth=(rlx-ulx)/(len(hist))
for i in range(len(bin_edges)):
x = ulx + (i * binwidth)
canvas.create_line(x,uly+5,x,uly, width=2)
canvas.create_text(x,uly+5, text='%d'% (bin_edges[i]), font=('Times',12),anchor=N)
# markings on y axis
maxhist=max(hist)
histwidth=(uly-rly)/maxhist
histbreak=int(maxhist/10)
for i in range(maxhist):
y = uly - (i * histwidth)
if i%histbreak==0:
canvas.create_line(ulx-5,y,ulx,y, width=2)
canvas.create_text(ulx-6,y, text='%d'% (i), font=('Times',12),anchor=E)
if i==maxhist-1 and i%histbreak!=0:
canvas.create_line(ulx-5,y,ulx,y, width=2)
canvas.create_text(ulx-6,y, text='%d'% (i), font=('Times',12),anchor=E)
canvas.create_line(ulx,rly,ulx,uly,arrow=LAST,fill='red',width=2)
#minline=canvas.create_text(ulx,rly-5,text='%d'% 0,fill='red',font=('Times',12))
canvas.create_line(rlx,rly,rlx,uly,arrow=LAST,fill='orange',width=2)
#maxline=canvas.create_text(rlx,rly-5,text='%d'% max(bin_edges),fill='red',font=('Times',12))
#canvas.bind('<Any-Enter>',item_enter)
#canvas.bind('<Any-Leave>',item_leave)
#canvas.bind('<Button-1>',item_start_drag)
#canvas.bind('<B1-Motion>',item_drag)
# rescale the input data so it matches the axes
## scaled = []
## for (x,y) in dataList:
## scaled.append((100 + 3*x, 250 - (4*y)/5))
# draw the wiggly line
#canvas.create_line(dataList, fill='black')
# and some dots at the corner points
#for (xs,ys) in dataList:
# canvas.create_oval(xs-6,ys-6,xs+6,ys+6, width=1,
# outline='black', fill='SkyBlue2')
# display window and wait for it to close
#root.mainloop()
def main():
# detect if this is being run by itself or because it was imported
if __name__ != "__main__":
return
# some meaningless x-y data points to plot
# the input data is in the range x = 0 to 100, and y = 0 to 250.
originalData = [(12, 56), (20, 94), (33, 98), (45, 120), (61, 180),
(75, 160), (98, 223)]
# rescale the data to lie in the graph range x = 100 to 400, y = 250 to 50
# remember y is zero at the top of the window.
scaledDataList = []
for (x,y) in originalData:
scaledDataList.append((100 + 3*x, 250 - (4*y)/5))
# NOTE: drawPlot's signature is now (ulx, uly, rlx, rly, hist, bin_edges, canvas);
# this legacy demo predates it, so the call is left disabled.
# drawPlot(scaledDataList)
if __name__ == '__main__' :
main()
```
#### File: 12HuYang/GridFree/batchprocess.py
```python
import tkinter.filedialog as filedialog
from tkinter import messagebox
from PIL import Image,ImageDraw,ImageFont
from PIL import ImageTk,ImageGrab
import cv2
from skimage import filters
import matplotlib.pyplot as pyplt
import numpy as np
from sklearn.cluster import KMeans
import tkintercorestat
import tkintercore
import cal_kernelsize
import os
import csv
import scipy.linalg as la
import multiprocessing
import time
#from multiprocessing import Process
batch_colorbandtable=np.array([[255,0,0],[255,127,0],[255,255,0],[127,255,0],[0,255,255],[0,127,255],[0,0,255],[127,0,255],[75,0,130],[255,0,255]],'uint8')
class batch_img():
def __init__(self,size,bands):
self.size=size
self.bands=bands
class batch_ser_func():
def __init__(self,filename):
self.file=filename
self.folder=FOLDER
self.exportpath=exportpath
self.batch_Multiimage={}
self.batch_Multigray={}
self.batch_Multitype={}
self.batch_Multiimagebands={}
self.batch_Multigraybands={}
self.batch_displaybandarray={}
self.batch_originbandarray={}
self.batch_originpcabands={}
self.batch_colordicesband={}
self.batch_results={}
self.kernersizes={}
self.reseglabels=None
self.displayfea_l=0
self.displayfea_w=0
self.RGB_vector=None
self.colorindex_vector=None
self.displaypclagels=None
def Open_batchimage(self):
try:
Filersc=cv2.imread(self.folder+'/'+self.file,flags=cv2.IMREAD_ANYCOLOR)
height,width,channel=np.shape(Filersc)
Filesize=(height,width)
print('filesize:',height,width)
RGBfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2RGB)
Grayfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2Lab)
Grayfile=cv2.cvtColor(Grayfile,cv2.COLOR_BGR2GRAY)
Grayimg=batch_img(Filesize,Grayfile)
RGBbands=np.zeros((channel,height,width))
for j in range(channel):
band=RGBfile[:,:,j]
band=np.where(band==0,1e-6,band)
RGBbands[j,:,:]=band
RGBimg=batch_img(Filesize,RGBbands)
tempdict={self.file:RGBimg}
self.batch_Multiimagebands.update(tempdict)
tempdict={self.file:Grayfile}
self.batch_Multigray.update(tempdict)
tempdict={self.file:0}
self.batch_Multitype.update(tempdict)
tempdict={self.file:Grayimg}
self.batch_Multigraybands.update(tempdict)
except:
# messagebox.showerror('Invalid Image Format','Cannot open '+filename)
return False
return True
def fillbands(self,originbands,displaybands,vector,vectorindex,name,band):
tempdict={name:band}
if name not in originbands:
originbands.update(tempdict)
image=cv2.resize(band,(self.displayfea_w,self.displayfea_l),interpolation=cv2.INTER_LINEAR)
displaydict={name:image}
displaybands.update(displaydict)
fea_bands=image.reshape((self.displayfea_l*self.displayfea_w),1)[:,0]
vector[:,vectorindex]=vector[:,vectorindex]+fea_bands
return
def singleband(self):
try:
bands=self.batch_Multiimagebands[self.file].bands
except:
return
channel,fea_l,fea_w=bands.shape
print('bandsize',fea_l,fea_w)
if fea_l*fea_w>2000*2000:
ratio=batch_findratio([fea_l,fea_w],[2000,2000])
else:
ratio=1
print('ratio',ratio)
originbands={}
displays={}
displaybands=cv2.resize(bands[0,:,:],(int(fea_w/ratio),int(fea_l/ratio)),interpolation=cv2.INTER_LINEAR)
displayfea_l,displayfea_w=displaybands.shape
self.displayfea_l,self.displayfea_w=displaybands.shape
self.RGB_vector=np.zeros((displayfea_l*displayfea_w,3))
self.colorindex_vector=np.zeros((displayfea_l*displayfea_w,12))
Red=bands[0,:,:]
Green=bands[1,:,:]
Blue=bands[2,:,:]
self.fillbands(originbands,displays,self.RGB_vector,0,'Band1',Red)
self.fillbands(originbands,displays,self.RGB_vector,1,'Band2',Green)
self.fillbands(originbands,displays,self.RGB_vector,2,'Band3',Blue)
#secondsmallest_R=np.partition(Red,1)[1][0]
#secondsmallest_G=np.partition(Green,1)[1][0]
#secondsmallest_B=np.partition(Blue,1)[1][0]
#Red=Red+secondsmallest_R
#Green=Green+secondsmallest_G
#Blue=Blue+secondsmallest_B
PAT_R=Red/(Red+Green)
PAT_G=Green/(Green+Blue)
PAT_B=Blue/(Blue+Red)
ROO_R=Red/Green
ROO_G=Green/Blue
ROO_B=Blue/Red
DIF_R=2*Red-Green-Blue
DIF_G=2*Green-Blue-Red
DIF_B=2*Blue-Red-Green
GLD_R=Red/(np.multiply(np.power(Blue,0.618),np.power(Green,0.382)))
GLD_G=Green/(np.multiply(np.power(Blue,0.618),np.power(Red,0.382)))
GLD_B=Blue/(np.multiply(np.power(Green,0.618),np.power(Red,0.382)))
self.fillbands(originbands,displays,self.colorindex_vector,0,'PAT_R',PAT_R)
self.fillbands(originbands,displays,self.colorindex_vector,1,'PAT_G',PAT_G)
self.fillbands(originbands,displays,self.colorindex_vector,2,'PAT_B',PAT_B)
self.fillbands(originbands,displays,self.colorindex_vector,3,'ROO_R',ROO_R)
self.fillbands(originbands,displays,self.colorindex_vector,4,'ROO_G',ROO_G)
self.fillbands(originbands,displays,self.colorindex_vector,5,'ROO_B',ROO_B)
self.fillbands(originbands,displays,self.colorindex_vector,6,'DIF_R',DIF_R)
self.fillbands(originbands,displays,self.colorindex_vector,7,'DIF_G',DIF_G)
self.fillbands(originbands,displays,self.colorindex_vector,8,'DIF_B',DIF_B)
self.fillbands(originbands,displays,self.colorindex_vector,9,'GLD_R',GLD_R)
self.fillbands(originbands,displays,self.colorindex_vector,10,'GLD_G',GLD_G)
self.fillbands(originbands,displays,self.colorindex_vector,11,'GLD_B',GLD_B)
NDI=128*((Green-Red)/(Green+Red)+1)
VEG=Green/(np.power(Red,0.667)*np.power(Blue,(1-0.667)))
Greenness=Green/(Green+Red+Blue)
CIVE=0.441*Red-0.811*Green+0.385*Blue+18.78745 # standard CIVE coefficients, matching singleband_oldversion below
MExG=1.262*Green-0.884*Red-0.311*Blue # standard MExG coefficients
NDRB=(Red-Blue)/(Red+Blue)
NGRDI=(Green-Red)/(Green+Red)
colorindex_vector=np.zeros((displayfea_l*displayfea_w,7))
self.fillbands(originbands,displays,colorindex_vector,0,'NDI',NDI)
self.fillbands(originbands,displays,colorindex_vector,1,'VEG',VEG)
self.fillbands(originbands,displays,colorindex_vector,2,'Greenness',Greenness)
self.fillbands(originbands,displays,colorindex_vector,3,'CIVE',CIVE)
self.fillbands(originbands,displays,colorindex_vector,4,'MExG',MExG)
self.fillbands(originbands,displays,colorindex_vector,5,'NDRB',NDRB)
self.fillbands(originbands,displays,colorindex_vector,6,'NGRDI',NGRDI)
rgb_M=np.mean(self.RGB_vector.T,axis=1)
colorindex_M=np.mean(self.colorindex_vector.T,axis=1)
print('rgb_M',rgb_M,'colorindex_M',colorindex_M)
rgb_C=self.RGB_vector-rgb_M
colorindex_C=self.colorindex_vector-colorindex_M
rgb_V=np.corrcoef(rgb_C.T)
color_V=np.corrcoef(colorindex_C.T)
rgb_std=rgb_C/np.std(self.RGB_vector.T,axis=1)
color_std=colorindex_C/np.std(self.colorindex_vector.T,axis=1)
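# PCA by hand: eigendecompose the feature correlation matrices, then project the
# standardized pixel-feature vectors onto the eigenvectors below.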
rgb_eigval,rgb_eigvec=np.linalg.eig(rgb_V)
color_eigval,color_eigvec=np.linalg.eig(color_V)
print('rgb_eigvec',rgb_eigvec)
print('color_eigvec',color_eigvec)
featurechannel=14
pcabands=np.zeros((self.colorindex_vector.shape[0],featurechannel))
for i in range(3):
pcn=rgb_eigvec[:,i]
pcnbands=np.dot(rgb_std,pcn)
pcvar=np.var(pcnbands)
print('rgb pc',i+1,'var=',pcvar)
pcabands[:,i]=pcabands[:,i]+pcnbands
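# Column layout after the next two lines: col 0 = RGB PC1, col 1 = RGB PC3,
# and cols 2..13 are overwritten with the color-index PCs in the following loop.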
pcabands[:,1]=np.copy(pcabands[:,2])
pcabands[:,2]=pcabands[:,2]*0
for i in range(2,featurechannel):
pcn=color_eigvec[:,i-2]
pcnbands=np.dot(color_std,pcn)
pcvar=np.var(pcnbands)
print('color index pc',i-1,'var=',pcvar)
pcabands[:,i]=pcabands[:,i]+pcnbands
displayfea_vector=np.concatenate((self.RGB_vector,self.colorindex_vector),axis=1)
self.batch_originpcabands.update({self.file:displayfea_vector})
pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
tempdictdisplay={'LabOstu':pcabandsdisplay}
self.batch_displaybandarray.update({self.file:tempdictdisplay})
self.batch_originbandarray.update({self.file:originbands})
def singleband_oldversion(self):
try:
bands=self.batch_Multigraybands[self.file].bands
except:
return
bandsize=self.batch_Multigraybands[self.file].size
print('bandsize',bandsize)
try:
channel,height,width=bands.shape
except:
channel=0
if channel>1:
bands=bands[0,:,:]
ostu=filters.threshold_otsu(bands)
bands=bands.astype('float32')
bands=bands/ostu
if bandsize[0]*bandsize[1]>2000*2000:
ratio=batch_findratio([bandsize[0],bandsize[1]],[2000,2000])
else:
ratio=1
print('ratio',ratio)
originbands={}
displays={}
fea_l,fea_w=bands.shape
# fea_vector=np.zeros((fea_l*fea_w,10))
# pyplt.imsave('batch_bands.png',bands)
displaybands=cv2.resize(bands,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
# pyplt.imsave('batch_displaybands.png',displaybands)
displayfea_l,displayfea_w=displaybands.shape
self.displayfea_l,self.displayfea_w=displaybands.shape
fea_vector=np.zeros((displayfea_l*displayfea_w,3))
displayfea_vector=np.zeros((displayfea_l*displayfea_w,7))
colorfea_vector=np.zeros((displayfea_l*displayfea_w,7))
if 'LabOstu' not in originbands:
originbands.update({'LabOstu':bands})
fea_bands=bands.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=displaybands.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,9]=fea_vector[:,0]+fea_bands
displayfea_vector[:,6]=displayfea_vector[:,6]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,6]=colorfea_vector[:,6]+colorfeabands
displays.update({'LabOstu':displaybands})
bands=self.batch_Multiimagebands[self.file].bands
NDI=128*((bands[1,:,:]-bands[0,:,:])/(bands[1,:,:]+bands[0,:,:])+1)
tempdict={'NDI':NDI}
if 'NDI' not in originbands:
originbands.update(tempdict)
displaybands=cv2.resize(NDI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
fea_bands=NDI.reshape(fea_l*fea_w,1)[:,0]
# originfea_vector[:,1]=originfea_vector[:,1]+fea_bands
displayfea_bands=displaybands.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,1]=fea_vector[:,1]+fea_bands
displayfea_vector[:,1]=displayfea_vector[:,1]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,1]=colorfea_vector[:,1]+colorfeabands # NDI goes to column 1 (column 6 belongs to LabOstu above)
displaydict={'NDI':displaybands}
displays.update(displaydict)
Red=bands[0,:,:]
Green=bands[1,:,:]
Blue=bands[2,:,:]
tempdict={'Band1':Red}
if 'Band1' not in originbands:
originbands.update(tempdict)
image=cv2.resize(Red,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band1':image}
displays.update(displaydict)
fea_bands=Red.reshape(fea_l*fea_w,1)[:,0]
# originfea_vector[:,2]=originfea_vector[:,2]+fea_bands
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,0]=fea_vector[:,0]+displayfea_bands
# displayfea_vector[:,2]=displayfea_vector[:,2]+displayfea_bands
tempdict={'Band2':Green}
if 'Band2' not in originbands:
originbands.update(tempdict)
image=cv2.resize(Green,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band2':image}
displays.update(displaydict)
fea_bands=Green.reshape(fea_l*fea_w,1)[:,0]
# originfea_vector[:,3]=originfea_vector[:,3]+fea_bands
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,1]=fea_vector[:,1]+displayfea_bands
# displayfea_vector[:,3]=displayfea_vector[:,3]+displayfea_bands
tempdict={'Band3':Blue}
if 'Band3' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,4]=originfea_vector[:,4]+Blue
image=cv2.resize(Blue,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band3':image}
displays.update(displaydict)
fea_bands=Blue.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,2]=fea_vector[:,2]+displayfea_bands
# displayfea_vector[:,4]=displayfea_vector[:,4]+displayfea_bands
Greenness = bands[1, :, :] / (bands[0, :, :] + bands[1, :, :] + bands[2, :, :])
tempdict = {'Greenness': Greenness}
if 'Greenness' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,5]=originfea_vector[:,5]+Greenness
image=cv2.resize(Greenness,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
displaydict={'Greenness':image}
#displaybandarray.update(worktempdict)
displays.update(displaydict)
fea_bands=Greenness.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,5]=fea_vector[:,5]+fea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,2]=colorfea_vector[:,2]+colorfeabands
displayfea_vector[:,2]=displayfea_vector[:,2]+displayfea_bands
VEG=bands[1,:,:]/(np.power(bands[0,:,:],0.667)*np.power(bands[2,:,:],(1-0.667)))
tempdict={'VEG':VEG}
if 'VEG' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,6]=originfea_vector[:,6]+VEG
image=cv2.resize(VEG,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
kernel=np.ones((4,4),np.float32)/16
#displaybandarray.update({'LabOstu':})
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'VEG':cv2.filter2D(image,-1,kernel)}
displays.update(worktempdict)
fea_bands=VEG.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,6]=fea_vector[:,6]+fea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,3]=colorfea_vector[:,3]+colorfeabands
displayfea_vector[:,3]=displayfea_vector[:,3]+displayfea_bands
CIVE=0.441*bands[0,:,:]-0.811*bands[1,:,:]+0.385*bands[2,:,:]+18.78745
tempdict={'CIVE':CIVE}
if 'CIVE' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,7]=originfea_vector[:,7]+CIVE
image=cv2.resize(CIVE,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'CIVE':image}
displays.update(worktempdict)
fea_bands=CIVE.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,7]=fea_vector[:,7]+fea_bands
displayfea_vector[:,4]=displayfea_vector[:,4]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,4]=colorfea_vector[:,4]+colorfeabands
MExG=1.262*bands[1,:,:]-0.884*bands[0,:,:]-0.311*bands[2,:,:]
tempdict={'MExG':MExG}
if 'MExG' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,8]=originfea_vector[:,8]+MExG
image=cv2.resize(MExG,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'MExG':image}
displays.update(worktempdict)
fea_bands=MExG.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,8]=fea_vector[:,8]+fea_bands
displayfea_vector[:,5]=displayfea_vector[:,5]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,5]=colorfea_vector[:,5]+colorfeabands
NDVI=(bands[0,:,:]-bands[2,:,:])/(bands[0,:,:]+bands[2,:,:])
tempdict={'NDVI':NDVI}
if 'NDVI' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,0]=originfea_vector[:,9]+NDVI
image=cv2.resize(NDVI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NDVI':image}
displays.update(worktempdict)
fea_bands=NDVI.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,0]=fea_vector[:,9]+fea_bands
displayfea_vector[:,0]=displayfea_vector[:,0]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,0]=colorfea_vector[:,0]+colorfeabands
NGRDI=(bands[1,:,:]-bands[0,:,:])/(bands[1,:,:]+bands[0,:,:])
tempdict={'NGRDI':NGRDI}
if 'NGRDI' not in originbands:
originbands.update(tempdict)
image=cv2.resize(NGRDI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NGRDI':image}
displays.update(worktempdict)
if channel>=1:
nirbands=self.batch_Multigraybands[self.file].bands
NDVI=(nirbands[0,:,:]-bands[1,:,:])/(nirbands[0,:,:]+bands[1,:,:])
tempdict={'NDVI':NDVI}
#if 'NDVI' not in originbandarray:
originbands.update(tempdict)
image=cv2.resize(NDVI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NDVI':image}
displays.update(worktempdict)
'''PCA part'''
displayfea_vector=np.concatenate((fea_vector,displayfea_vector),axis=1)
M=np.mean(displayfea_vector.T,axis=1)
OM=np.mean(fea_vector.T,axis=1)
print('M',M,'M shape',M.shape, 'OM',OM,'OM Shape',OM.shape)
C=displayfea_vector-M
OC=fea_vector-OM
#max=np.max(C.T,axis=1)
#print('MAX',max)
#C=C/max
print('C',C,'OC',OC)
#V=np.cov(C.T)
V=np.corrcoef(C.T)
OV=np.corrcoef(OC.T)
std=np.std(displayfea_vector.T,axis=1)
O_std=np.std(fea_vector.T,axis=1)
print(std,O_std)
std_displayfea=C/std
O_stddisplayfea=OC/O_std
print(std_displayfea,O_stddisplayfea)
#eigvalues,eigvectors=np.linalg.eig(V)
#n,m=displayfea_vector.shape
#C=np.dot(displayfea_vector.T,displayfea_vector)/(n-1)
V_var=np.cov(std_displayfea.T)
print('COV',V_var)
print('COR',V)
eigvalues=la.eigvals(V_var)
#eigvalues=np.linalg.eigvals(C)
print('eigvalue',eigvalues)
idx=np.argsort(eigvalues)
print('idx',idx)
eigvalues,eigvectors=np.linalg.eig(V)
print('eigvalue',eigvalues)
print('eigvectors',eigvectors)
eigvalueperc={}
featurechannel=10
# for i in range(len(eigvalues)):
# print('percentage',i,eigvalues[i]/sum(eigvalues))
# eigvalueperc.update({i:eigvalues[i]/sum(eigvalues)})
# #if eigvalues[i]>0:
# featurechannel+=1
# o_eigenvalue,o_eigenvector=np.linalg.eig(OV)
pcabands=np.zeros((displayfea_vector.shape[0],featurechannel))
# o_pcabands=np.zeros((fea_vector.shape[0],featurechannel))
pcavar={}
#separate PCA
# for i in range(3):
# pcn=o_eigenvector[:,i]
# pcnbands=np.dot(O_stddisplayfea,pcn)
# pcvar=np.var(pcnbands)
# print('pc',i+1,' var=',pcvar)
# pcabands[:,i]=pcabands[:,i]+pcnbands
# for i in range(7):
# pcn=eigvectors[:,i]
# # opcn=o_eigenvector[:,i]
# #pcnbands=np.dot(displayfea_vector,pcn)
# pcnbands=np.dot(std_displayfea,pcn)
# # opcnbands=np.dot(O_stddisplayfea,opcn)
# pcvar=np.var(pcnbands)
# print('pc',i+1,' var=',pcvar)
# temppcavar={i:pcvar}
# pcavar.update(temppcavar)
# # pcnbands=np.dot(C,pcn)
# # opcnbands=np.dot(OC,opcn)
# pcabands[:,i+3]=pcabands[:,i+3]+pcnbands
#combined PCa
for i in range(featurechannel):
pcn=eigvectors[:,i]
# pcnbands=np.dot(std_displayfea,pcn)
pcnbands=np.dot(C,pcn)
pcvar=np.var(pcnbands)
print('pc',i+1,' var=',pcvar)
temppcavar={i:pcvar}
pcavar.update(temppcavar)
pcabands[:,i]=pcabands[:,i]+pcnbands
# o_pcabands[:,i]=o_pcabands[:,i]+opcnbands
# sortvar=sorted(pcavar,key=pcavar.get)
# print(sortvar)
# for i in range(len(sortvar)):
# pcn=eigvectors[:,sortvar[i]]
# pcnbands=np.dot(displayfea_vector,pcn)
# pcabands[:,i]=pcabands[:,i]+pcnbands
#np.savetxt('pcs.csv',pcabands,delimiter=',',fmt='%s')
#np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%s')
#high,width=pcabands.shape
#fp=open('pcs.csv',w)
#fc=open('color-index.csv',w)
#head=['Otsu','NDI','R','G','B','Greenness','VEG','CIVE','MExG','NDVI']
#for i in range(high):
# '''No PCA'''
# colorfea_vector=np.concatenate((fea_vector,colorfea_vector),axis=1)
# displayfea_vector=np.concatenate((fea_vector,displayfea_vector),axis=1)
# M=np.mean(colorfea_vector.T,axis=1)
# print('colorfea_vector M',M)
# pcabands=np.copy(colorfea_vector)
# featurechannel=10
#threedplot(pcabands)
# self.batch_originpcabands.update({self.file:o_pcabands})
self.batch_originpcabands.update({self.file:displayfea_vector})
pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
#originbands={'LabOstu':pcabandsdisplay}
tempdictdisplay={'LabOstu':pcabandsdisplay}
#displaybandarray.update({file:displays})
self.batch_displaybandarray.update({self.file:tempdictdisplay})
self.batch_originbandarray.update({self.file:originbands})
def kmeansclassify(self):
if kmeans==0:
messagebox.showerror('Kmeans error','Kmeans should be greater than 0')
return None
file=self.file
originpcabands=self.batch_displaybandarray[file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
tempband=np.zeros((pcah,pcaw,1))
if pcweight==0.0:
tempband[:,:,0]=tempband[:,:,0]+originpcabands[:,:,pcs]
else:
if pcweight<0.0:
rgbpc=originpcabands[:,:,0]
else:
rgbpc=originpcabands[:,:,1]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweight)*2*rgbpc
colorpc=originpcabands[:,:,pcs]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweight)*2)*colorpc
tempband[:,:,0]=tempband[:,:,0]+firstterm+secondterm
self.displaypclagels=np.copy(tempband[:,:,0])
if kmeans==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave(file+'_k=1.png',displaylabels)
else:
if kmeans>1:
h,w,c=tempband.shape
print('shape',tempband.shape)
reshapedtif=tempband.reshape(tempband.shape[0]*tempband.shape[1],c)
print('reshape',reshapedtif.shape)
clf=KMeans(n_clusters=kmeans,init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(reshapedtif)
# print('label=0',np.any(tempdisplayimg==0))
displaylabels=tempdisplayimg.labels_.reshape((self.batch_displaybandarray[self.file]['LabOstu'].shape[0],
self.batch_displaybandarray[self.file]['LabOstu'].shape[1]))
clusterdict={}
displaylabels=displaylabels+10
for i in range(kmeans):
locs=np.where(tempdisplayimg.labels_==i)
maxval=reshapedtif[locs].max()
print(maxval)
clusterdict.update({maxval:i+10})
print(clusterdict)
sortcluster=list(sorted(clusterdict))
print(sortcluster)
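# Relabel clusters 0..kmeans-1 in ascending order of each cluster's max feature value,
# so cluster indices are comparable across images (the +10 shift above avoids
# collisions while relabeling in place).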
for i in range(len(sortcluster)):
cluster_num=clusterdict[sortcluster[i]]
displaylabels=np.where(displaylabels==cluster_num,i,displaylabels)
return displaylabels
def kmeansclassify_oldversion(self):
if kmeans==0:
messagebox.showerror('Kmeans error','Kmeans should be greater than 0')
return None
file=self.file
originpcabands=self.batch_displaybandarray[self.file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
print(self.file,'originpcabands',pcah,pcaw,pcac)
pcakeys=pcs
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
if kmeans==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave(file+'_k=1.png',displaylabels)
else:
#tempband=displaybandarray[currentfilename]['LabOstu']
if kmeans>1:
h,w,c=tempband.shape
print('shape',tempband.shape)
reshapedtif=tempband.reshape(tempband.shape[0]*tempband.shape[1],c)
print('reshape',reshapedtif.shape)
clf=KMeans(n_clusters=kmeans,init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(reshapedtif)
# print('label=0',np.any(tempdisplayimg==0))
displaylabels=tempdisplayimg.labels_.reshape((self.batch_displaybandarray[self.file]['LabOstu'].shape[0],
self.batch_displaybandarray[self.file]['LabOstu'].shape[1]))
clusterdict={}
displaylabels=displaylabels+10
for i in range(kmeans):
locs=np.where(tempdisplayimg.labels_==i)
maxval=reshapedtif[locs].max()
print(maxval)
clusterdict.update({maxval:i+10})
print(clusterdict)
sortcluster=list(sorted(clusterdict))
print(sortcluster)
for i in range(len(sortcluster)):
cluster_num=clusterdict[sortcluster[i]]
displaylabels=np.where(displaylabels==cluster_num,i,displaylabels)
return displaylabels
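# generateimgplant: marks every pixel whose cluster id is in kmeans_sel to build
# a binary plant mask, saves an all-cluster color map and the yellow binary mask
# as PNG previews scaled toward 850x850, and returns two full-resolution copies
# of the mask (working labels and the original binary image).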
def generateimgplant(self,displaylabels):
colordicesband=np.copy(displaylabels)
tempdisplayimg=np.zeros((self.batch_displaybandarray[self.file]['LabOstu'].shape[0],
self.batch_displaybandarray[self.file]['LabOstu'].shape[1]))
colordivimg=np.zeros((self.batch_displaybandarray[self.file]['LabOstu'].shape[0],
self.batch_displaybandarray[self.file]['LabOstu'].shape[1]))
for i in range(len(kmeans_sel)):
sk=kmeans_sel[i]-1
tempdisplayimg=np.where(displaylabels==sk,1,tempdisplayimg)
currentlabels=np.copy(tempdisplayimg)
originbinaryimg=np.copy(tempdisplayimg)
tempcolorimg=np.copy(displaylabels).astype('float32')
ratio=batch_findratio([tempdisplayimg.shape[0],tempdisplayimg.shape[1]],[850,850])
if tempdisplayimg.shape[0]*tempdisplayimg.shape[1]<850*850:
tempdisplayimg=cv2.resize(tempdisplayimg,(int(tempdisplayimg.shape[1]*ratio),int(tempdisplayimg.shape[0]*ratio)))
colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]*ratio),int(colordivimg.shape[0]*ratio)))
else:
tempdisplayimg=cv2.resize(tempdisplayimg,(int(tempdisplayimg.shape[1]/ratio),int(tempdisplayimg.shape[0]/ratio)))
colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]/ratio),int(colordivimg.shape[0]/ratio)))
binaryimg=np.zeros((tempdisplayimg.shape[0],tempdisplayimg.shape[1],3))
colordeimg=np.zeros((colordivimg.shape[0],colordivimg.shape[1],3))
locs=np.where(tempdisplayimg==1)
binaryimg[locs]=[240,228,66]
for i in range(kmeans):
locs=np.where(colordivimg==i)
colordeimg[locs]=batch_colorbandtable[i]
Image.fromarray(colordeimg.astype('uint8')).save(self.file+'-allcolorindex.png',"PNG")
Image.fromarray((binaryimg.astype('uint8'))).save(self.file+'-binaryimg.png',"PNG")
return currentlabels,originbinaryimg
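# resegment: re-runs segmentation on the cached labels with the loaded area
# (minthres/maxthres) and diagonal (minlw/maxlw) thresholds and stores the
# refreshed label dictionary for this file.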
def resegment(self):
if self.reseglabels is None:
return False
labels=np.copy(self.reseglabels)
reseglabels,border,colortable,labeldict=tkintercorestat.resegmentinput(labels,minthres,maxthres,minlw,maxlw)
self.batch_results.update({self.file:(labeldict,{})})
return True
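# extraction: initial segmentation. Skips the image when the nonzero density
# exceeds twice the reference ratio from the batch file, downsamples sparse
# masks toward 1600x1600 and dense ones toward 850x850, then calls
# tkintercorestat.init and caches the resulting label dictionary.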
def extraction(self,currentlabels):
if kmeans==1:
messagebox.showerror('Invalid Class #',message='#Class = 1, try changing it to 2 or more, and refresh Color-Index.')
return False
nonzeros=np.count_nonzero(currentlabels)
print('nonzero counts',nonzeros)
nonzeroloc=np.where(currentlabels!=0)
try:
ulx,uly=min(nonzeroloc[1]),min(nonzeroloc[0])
except:
messagebox.showerror('Invalid Colorindices',message='Need to process color indices first')
return False
rlx,rly=max(nonzeroloc[1]),max(nonzeroloc[0])
nonzeroratio=float(nonzeros)/((rlx-ulx)*(rly-uly))
print(nonzeroratio)
if nonzeroratio>std_nonzeroratio*2:
return False
dealpixel=nonzeroratio*currentlabels.shape[0]*currentlabels.shape[1]
ratio=1
if nonzeroratio<=0.2:# and nonzeroratio>=0.1:
ratio=batch_findratio([currentlabels.shape[0],currentlabels.shape[1]],[1600,1600])
if currentlabels.shape[0]*currentlabels.shape[1]>1600*1600:
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/ratio),int(currentlabels.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
else:
#ratio=1
#print('nonzeroratio',ratio)
workingimg=np.copy(currentlabels)
segmentratio=0
else:
print('deal pixel',dealpixel)
if dealpixel>512000:
if currentlabels.shape[0]*currentlabels.shape[1]>850*850:
segmentratio=batch_findratio([currentlabels.shape[0],currentlabels.shape[1]],[850,850])
if segmentratio<2:
segmentratio=2
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/segmentratio),int(currentlabels.shape[0]/segmentratio)),interpolation=cv2.INTER_LINEAR)
else:
segmentratio=1
#print('ratio',ratio)
workingimg=np.copy(currentlabels)
pixelmmratio=1.0
coin=False
print('nonzeroratio:',ratio,'segmentation ratio',segmentratio)
print('workingimgsize:',workingimg.shape)
pyplt.imsave('workingimg.png',workingimg)
originlabels=None
if originlabels is None:
originlabels,border,colortable,originlabeldict=tkintercorestat.init(workingimg,workingimg,'',workingimg,10,coin)
self.reseglabels=originlabels
self.batch_results.update({self.file:(originlabeldict,{})})
return True
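# savePCAimg: rebuilds the same weighted PC band used for clustering, rescales
# it to 0-255, and saves it as a grayscale PNG resized back to the original
# image dimensions.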
def savePCAimg(self,originfile):
file=self.file
path=self.exportpath
originpcabands=self.batch_displaybandarray[file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
tempband=np.zeros((pcah,pcaw))
if pcweight==0.0:
tempband=tempband+originpcabands[:,:,pcs]
else:
if pcweight<0.0:
rgbpc=originpcabands[:,:,0]
else:
rgbpc=originpcabands[:,:,1]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweight)*2*rgbpc
colorpc=originpcabands[:,:,pcs]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweight)*2)*colorpc
tempband=tempband+firstterm+secondterm
displaylabels=np.copy(tempband)
if displaylabels.min()<0:
displaylabels=displaylabels-displaylabels.min()
colorrange=displaylabels.max()-displaylabels.min()
displaylabels=displaylabels*255/colorrange
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
originheight,originwidth=self.batch_Multigraybands[file].size
origingray=grayimg.resize([originwidth,originheight],resample=Image.BILINEAR)
origingray.save(path+'/'+originfile+'-PCAimg.png',"PNG")
# addcolorstrip()
return
def savePCAimg_oldversion(self,originfile):
file=self.file
path=self.exportpath
originpcabands=self.batch_displaybandarray[file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcakeys=pcs
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
displaylabels=np.mean(tempband,axis=2)
# generateimgplant(displaylabels)
# grayimg=(((displaylabels-displaylabels.min())/(displaylabels.max()-displaylabels.min()))*255.9).astype(np.uint8)
# pyplt.imsave('k=1.png',displaylabels.astype('uint8'))
# pyplt.imsave('k=1.png',grayimg)
if displaylabels.min()<0:
displaylabels=displaylabels-displaylabels.min()
colorrange=displaylabels.max()-displaylabels.min()
displaylabels=displaylabels*255/colorrange
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
originheight,originwidth=self.batch_Multigraybands[file].size
origingray=grayimg.resize([originwidth,originheight],resample=Image.BILINEAR)
origingray.save(path+'/'+originfile+'-PCAimg.png',"PNG")
# addcolorstrip()
return
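# showcounting: renders the counting overlay. Optionally swaps the background to
# white (whext) or black (blkext), draws a red bounding box and an outlined
# label number on each item, adds an item-count header, and returns a Tk
# PhotoImage plus the full-size and display-size PIL images.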
def showcounting(self,tup,number=True,frame=True,header=True,whext=False,blkext=False):
labels=tup[0]
colortable=tup[2]
coinparts=tup[3]
filename=tup[4]
uniquelabels=list(colortable.keys())
imgrsc=cv2.imread(FOLDER+'/'+filename,flags=cv2.IMREAD_ANYCOLOR)
imgrsc=cv2.cvtColor(imgrsc,cv2.COLOR_BGR2RGB)
imgrsc=cv2.resize(imgrsc,(labels.shape[1],labels.shape[0]),interpolation=cv2.INTER_LINEAR)
image=Image.fromarray(imgrsc)
if whext==True:
# blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg[:,:,:]=[255,255,255]
itemlocs=np.where(labels!=0)
# blkbkg[itemlocs]=imgrsc[itemlocs]
whbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(whbkg.astype('uint8'))
if blkext==True:
blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
itemlocs=np.where(labels!=0)
blkbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(blkbkg.astype('uint8'))
print('showcounting_resize',image.size)
image.save('beforlabel.gif',append_images=[image])
draw=ImageDraw.Draw(image)
sizeuniq,sizecounts=np.unique(labels,return_counts=True)
minsize=min(sizecounts)
suggsize=int(minsize**0.5)
if suggsize>22:
suggsize=22
if suggsize<14:
suggsize=14
font=ImageFont.truetype('cmb10.ttf',size=suggsize)
for uni in uniquelabels:
if uni!=0:
pixelloc = np.where(labels == uni)
try:
ulx = min(pixelloc[1])
except:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
print(ulx, uly, rlx, rly)
if frame==True:
draw.polygon([(ulx,uly),(rlx,uly),(rlx,rly),(ulx,rly)],outline='red')
if number==True:
if uni in colortable:
canvastext = str(colortable[uni])
else:
canvastext = 'No label'
# if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=font, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=font,fill='black')
if header==True:
content='item count:'+str(len(uniquelabels))+'\n File: '+filename
contentlength=len(content)+50
#rectext=canvas.create_text(10,10,fill='black',font='Times 16',text=content,anchor=NW)
draw.text((10-1, 10+1), text=content, font=font, fill='white')
draw.text((10+1, 10+1), text=content, font=font, fill='white')
draw.text((10-1, 10-1), text=content, font=font, fill='white')
draw.text((10+1, 10-1), text=content, font=font, fill='white')
#draw.text((10,10),text=content,font=font,fill=(141,2,31,0))
draw.text((10,10),text=content,font=font,fill='black')
#image.save(originfile+'-countresult'+extension,"JPEG")
#firstimg=Multigraybands[currentfilename]
#height,width=firstimg.size
height,width,channel=self.batch_displaybandarray[filename]['LabOstu'].shape
ratio=batch_findratio([height,width],[850,850])
#if labels.shape[0]*labels.shape[1]<850*850:
# disimage=image.resize([int(labels.shape[1]*ratio),int(labels.shape[0]*ratio)],resample=Image.BILINEAR)
#else:
# disimage=image.resize([int(labels.shape[1]/ratio),int(labels.shape[0]/ratio)],resample=Image.BILINEAR)
print('show counting ratio',ratio)
if height*width<850*850:
print('showcounting small')
disimage=image.resize([int(width*ratio),int(height*ratio)],resample=Image.BILINEAR)
else:
print('showcounting big')
disimage=image.resize([int(width/ratio),int(height/ratio)],resample=Image.BILINEAR)
print('showcounting shape',disimage.size)
displayoutput=ImageTk.PhotoImage(disimage)
disimage.save('output.gif',append_images=[disimage])
#image.save('originoutput.gif',append_images=[image])
return displayoutput,image,disimage
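# export_ext: exports size measurements on a white- or black-background render.
# For each item, length is taken as the longest distance between boundary
# pixels (the bounding-box diagonal for items covering more than 6% of the
# image), width comes from the best match between the two tangent-line point
# sets, and both are drawn in yellow before saving the
# -sizeresult/-segmentresult/-labelresult PNGs at original resolution.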
def export_ext(self,whext=False,blkext=False):
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
file=self.file
path=self.exportpath
suggsize=8
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
# kernersizes={}
# for file in batch_filenames:
labeldict=self.batch_results[file][0]
itervalue='iter0'
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
colortable=labeldict[itervalue]['colortable']
head_tail=os.path.split(file)
originfile,extension=os.path.splitext(head_tail[1])
if len(path)>0:
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=self.showcounting(tup,False,True,True,whext,blkext)
imageband=segimg
draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
tempdict={}
pixelmmratio=1.0
print('pixelmmratio',pixelmmratio)
if file not in self.kernersizes:
for uni in uniquelabels:
if uni !=0:
pixelloc = np.where(labels == float(uni))
try:
ulx = min(pixelloc[1])
except:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
print(ulx, uly, rlx, rly)
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
length={}
currborder=tkintercore.get_boundaryloc(labels,uni)
# print('currborder',currborder)
print('currborder length',len(currborder[0])*len(currborder[1]))
pixperc=float(len(pixelloc[0])/(labels.shape[0]*labels.shape[1]))
print('pix length percentage',pixperc)
if pixperc>0.06:
x0=ulx
y0=uly
x1=rlx
y1=rly
kernellength=float(((x0-x1)**2+(y0-y1)**2)**0.5)
else:
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
templength=float(((currborder[0][i]-currborder[0][j])**2+(currborder[1][i]-currborder[1][j])**2)**0.5)
length.update({(i,j):templength})
sortedlength=sorted(length,key=length.get,reverse=True)
try:
topcouple=sortedlength[0]
except:
continue
kernellength=length[topcouple]
i=topcouple[0]
j=topcouple[1]
x0=currborder[1][i]
y0=currborder[0][i]
x1=currborder[1][j]
y1=currborder[0][j]
#slope=float((y0-y1)/(x0-x1))
linepoints=[(currborder[1][i],currborder[0][i]),(currborder[1][j],currborder[0][j])]
#draw.line(linepoints,fill='yellow')
#points=linepixels(currborder[1][i],currborder[0][i],currborder[1][j],currborder[0][j])
lengthpoints=cal_kernelsize.bresenhamline(x0,y0,x1,y1) #x0,y0,x1,y1
for point in lengthpoints:
# if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='yellow')
tengentaddpoints=cal_kernelsize.tengentadd(x0,y0,x1,y1,rlx,rly,labels,uni) #find tangent line above
#for point in tengentaddpoints:
#if int(point[0])>=ulx and int(point[0])<=rlx and int(point[1])>=uly and int(point[1])<=rly:
# draw.point([int(point[0]),int(point[1])],fill='green')
tengentsubpoints=cal_kernelsize.tengentsub(x0,y0,x1,y1,ulx,uly,labels,uni) #find tangent line below
#for point in tengentsubpoints:
# draw.point([int(point[0]),int(point[1])],fill='green')
pointmatchdict={}
for i in range(len(tengentaddpoints)): #find the pixel pair with shortest distance
width=kernellength
pointmatch=[]
point=tengentaddpoints[i]
try:
templabel=labels[int(point[1]),int(point[0])]
except:
continue
if templabel==uni:
for j in range(len(tengentsubpoints)):
subpoint=tengentsubpoints[j]
tempwidth=float(((point[0]-subpoint[0])**2+(point[1]-subpoint[1])**2)**0.5)
if tempwidth<width:
pointmatch[:]=[]
pointmatch.append(point)
pointmatch.append(subpoint)
#print('tempwidth',width)
width=tempwidth
if len(pointmatch)>0:
#print('pointmatch',pointmatch)
pointmatchdict.update({(pointmatch[0],pointmatch[1]):width})
widthsort=sorted(pointmatchdict,key=pointmatchdict.get,reverse=True)
try:
pointmatch=widthsort[0]
print('final pointmatch',pointmatch)
except:
continue
if len(pointmatch)>0:
x0=int(pointmatch[0][0])
y0=int(pointmatch[0][1])
x1=int(pointmatch[1][0])
y1=int(pointmatch[1][1])
# if imgtypevar.get()=='0':
draw.line([(x0,y0),(x1,y1)],fill='yellow')
width=float(((x0-x1)**2+(y0-y1)**2)**0.5)
print('width',width,'length',kernellength)
print('kernelwidth='+str(width*pixelmmratio))
print('kernellength='+str(kernellength*pixelmmratio))
#print('kernelwidth='+str(kernelwidth*pixelmmratio))
tempdict.update({uni:[kernellength,width,pixelmmratio**2*len(pixelloc[0]),kernellength*pixelmmratio,width*pixelmmratio]})
if uni in colortable:
canvastext = str(colortable[uni])
else:
canvastext = 'No label'
# if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=smallfont, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=smallfont,fill='black')
#print(event.x, event.y, labels[event.x, event.y], ulx, uly, rlx, rly)
#recborder = canvas.create_rectangle(ulx, uly, rlx, rly, outline='red')
#drawcontents.append(recborder)
self.kernersizes.update({file:tempdict})
originheight,originwidth=self.batch_Multigraybands[file].size
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
extcolor=""
if whext==True:
extcolor= "-extwht"
if blkext==True:
extcolor="-extblk"
image.save(path+'/'+originfile+extcolor+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=self.showcounting(tup,False,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=self.showcounting(tup,True,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-labelresult'+'.png',"PNG")
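# export_result: per-image export driver. Runs both background-extraction
# renders, saves the standard overlay images plus the PCA, color-cluster, and
# binary previews, then writes per-item statistics (area, length, width, and
# avg/sum/std per color index and per PC band) to <name>-outputdata.csv.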
def export_result(self):
file=self.file
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
suggsize=8
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
self.kernersizes={}
# path=filedialog.askdirectory()
self.export_ext(True,False)
self.export_ext(False,True)
# for file in batch_filenames:
labeldict=self.batch_results[self.file][0]
itervalue='iter0'
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
colortable=labeldict[itervalue]['colortable']
head_tail=os.path.split(self.file)
originfile,extension=os.path.splitext(head_tail[1])
if len(self.exportpath)>0:
tup=(labels,counts,colortable,[],self.file)
_band,segimg,small_segimg=self.showcounting(tup,False)
#imageband=outputimgbands[file][itervalue]
imageband=segimg
# draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
# tempdict={}
pixelmmratio=1.0
#print('coinsize',coinsize.get(),'pixelmmratio',pixelmmratio)
print('pixelmmratio',pixelmmratio)
originheight,originwidth=self.batch_Multigraybands[file].size
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
image.save(self.exportpath+'/'+originfile+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=self.showcounting(tup,False)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(self.exportpath+'/'+originfile+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=self.showcounting(tup,True)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(self.exportpath+'/'+originfile+'-labelresult'+'.png',"PNG")
originrestoredband=np.copy(labels)
restoredband=originrestoredband.astype('uint8')
colordicesband=self.batch_colordicesband[file]
colordiv=np.zeros((colordicesband.shape[0],colordicesband.shape[1],3))
self.savePCAimg(originfile)
# kvar=int(kmeans.get())
# print('kvar',kvar)
# for i in range(kvar):
# locs=np.where(colordicesband==i)
# colordiv[locs]=colorbandtable[i]
# colordivimg=Image.fromarray(colordiv.astype('uint8'))
# colordivimg.save(path+'/'+originfile+'-colordevice'+'.jpeg',"JPEG")
colordivimg=Image.open(file+'-allcolorindex.png')
copycolordiv=colordivimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copycolordiv.save(self.exportpath+'/'+originfile+'-colordevice'+'.png',"PNG")
#pyplt.imsave(path+'/'+originfile+'-colordevice'+'.png',colordiv.astype('uint8'))
# copybinary=np.zeros((originbinaryimg.shape[0],originbinaryimg.shape[1],3),dtype='float')
# nonzeros=np.where(originbinaryimg==1)
# copybinary[nonzeros]=[255,255,0]
# binaryimg=Image.fromarray(copybinary.astype('uint8'))
binaryimg=Image.open(file+'-binaryimg.png')
copybinaryimg=binaryimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copybinaryimg.save(self.exportpath+'/'+originfile+'-binaryimg'+'.png',"PNG")
# pyplt.imsave(path+'/'+originfile+'-binaryimg'+'.png',originbinaryimg.astype('uint8'))
#restoredband=cv2.resize(src=restoredband,dsize=(originwidth,originheight),interpolation=cv2.INTER_LINEAR)
print(restoredband.shape)
currentsizes=self.kernersizes[self.file]
indicekeys=list(self.batch_originbandarray[self.file].keys())
indeclist=[ 0 for i in range(len(indicekeys)*3)]
pcalist=[0 for i in range(3)]
# temppcabands=np.zeros((self.batch_originpcabands[self.file].shape[0],len(pcs)))
# for i in range(len(pcs)):
# temppcabands[:,i]=temppcabands[:,i]+self.batch_originpcabands[self.file][:,pcs[i]-1]
# pcabands=np.mean(temppcabands,axis=1)
# # pcabands=pcabands.reshape((originheight,originwidth))
# pcabands=pcabands.reshape((self.displayfea_l,self.displayfea_w))
pcabands=np.copy(self.displaypclagels)
datatable={}
origindata={}
for key in indicekeys:
data=self.batch_originbandarray[self.file][key]
data=data.tolist()
tempdict={key:data}
origindata.update(tempdict)
print(key)
# for uni in colortable:
print(uniquelabels)
print('len uniquelabels',len(uniquelabels))
for uni in uniquelabels:
print(uni,colortable[uni])
uniloc=np.where(labels==float(uni))
if len(uniloc[0])==0 or len(uniloc[1])==0:
print('no uniloc\n')
print(uniloc[0],uniloc[1])
continue
smalluniloc=np.where(originrestoredband==uni)
ulx,uly=min(smalluniloc[1]),min(smalluniloc[0])
rlx,rly=max(smalluniloc[1]),max(smalluniloc[0])
width=rlx-ulx
length=rly-uly
print(width,length)
subarea=restoredband[uly:rly+1,ulx:rlx+1]
subarea=subarea.tolist()
amount=len(uniloc[0])
print(amount)
try:
sizes=currentsizes[uni]
except:
print('no sizes\n')
continue
#templist=[amount,length,width]
templist=[amount,sizes[0],sizes[1],sizes[2],sizes[3],sizes[4]]
tempdict={colortable[uni]:templist+indeclist+pcalist} #NIR,Red Edge,R,G,B,NDVI,area
print(tempdict)
for ki in range(len(indicekeys)):
originNDVI=origindata[indicekeys[ki]]
print(len(originNDVI),len(originNDVI[0]))
pixellist=[]
for k in range(len(uniloc[0])):
#print(uniloc[0][k],uniloc[1][k])
try:
tempdict[colortable[uni]][6+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
tempdict[colortable[uni]][7+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
pixellist.append(originNDVI[uniloc[0][k]][uniloc[1][k]])
tempdict[colortable[uni]][ki*3+6]=tempdict[colortable[uni]][ki*3+6]/amount
tempdict[colortable[uni]][ki*3+8]=np.std(pixellist)
pixellist=[]
for k in range(len(uniloc[0])):
try:
tempdict[colortable[uni]][-2]+=pcabands[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
tempdict[colortable[uni]][-3]+=pcabands[uniloc[0][k]][uniloc[1][k]]
pixellist.append(pcabands[uniloc[0][k]][uniloc[1][k]])
tempdict[colortable[uni]][-3]=tempdict[colortable[uni]][-3]/amount
tempdict[colortable[uni]][-1]=np.std(pixellist)
datatable.update(tempdict)
filename=self.exportpath+'/'+originfile+'-outputdata.csv'
with open(filename,mode='w') as f:
csvwriter=csv.writer(f)
rowcontent=['Index','Plot','Area(#pixel)','Length(#pixel)','Width(#pixel)','Area(mm2)','Length(mm)','Width(mm)']
for key in indicekeys:
rowcontent.append('avg-'+str(key))
rowcontent.append('sum-'+str(key))
rowcontent.append('std-'+str(key))
rowcontent.append('avg-PCA')
rowcontent.append('sum-PCA')
rowcontent.append('std-PCA')
#csvwriter.writerow(['ID','NIR','Red Edge','Red','Green','Blue','NIRv.s.Green','LabOstu','area(#of pixel)'])
#csvwriter.writerow(['Index','Plot','Area(#pixels)','avg-NDVI','sum-NDVI','std-NDVI','Length(#pixel)','Width(#pixel)'])#,'#holes'])
csvwriter.writerow(rowcontent)
i=1
for uni in datatable:
row=[i,uni]
for j in range(len(datatable[uni])):
row.append(datatable[uni][j])
#row=[i,uni,datatable[uni][0],datatable[uni][1],datatable[uni][2],datatable[uni][5],datatable[uni][3],datatable[uni][4]]#,
#datatable[uni][5]]
i+=1
print(row)
csvwriter.writerow(row)
print('total data length=',len(datatable))
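# process: the per-file pipeline. Load the image, build feature bands, run
# KMeans classification, generate the plant mask, segment, resegment with the
# loaded thresholds, and export the results.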
def process(self):
if self.Open_batchimage()==False:
return
self.singleband()
colordicesband=self.kmeansclassify()
if type(colordicesband)==type(None):
return
self.batch_colordicesband.update({self.file:colordicesband})
currentlabels,originbinaryimg=self.generateimgplant(colordicesband)
if self.extraction(currentlabels)==False:
return
if self.resegment()==False:
return
self.export_result()
batch_filenames=[]
batch_Multiimage={}
batch_Multigray={}
batch_Multitype={}
batch_Multiimagebands={}
batch_Multigraybands={}
batch_displaybandarray={}
batch_originbandarray={}
batch_originpcabands={}
batch_colordicesband={}
batch_results={}
pcweight=0
pcs=0
kmeans=0
kmeans_sel=[]
maxthres=0
minthres=0
maxlw=0
minlw=0
std_nonzeroratio=0
FOLDER=''
exportpath=''
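# batch_findratio: integer scale factor between originsize and objectsize, used
# to shrink large images toward the target (or enlarge small ones), and forced
# to at least 2 for images above 850x850.
# e.g. batch_findratio([1700, 1700], [850, 850]) -> 2, so callers divide both
# dimensions by 2 before display.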
def batch_findratio(originsize,objectsize):
oria=originsize[0]
orib=originsize[1]
obja=objectsize[0]
objb=objectsize[1]
if oria>obja or orib>objb:
ratio=round(max((oria/obja),(orib/objb)))
else:
ratio=round(min((obja/oria),(objb/orib)))
if oria*orib>850 * 850:
if ratio<2:
ratio=2
return ratio
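# Open_batchimage: loads one image, converts BGR->RGB for the color bands and
# BGR->Lab->gray for the gray band, replaces zero-valued RGB pixels with 1e-6
# to avoid divide-by-zero in later color indices, and caches the band objects
# in the batch dictionaries. Returns False when the file is not a readable image.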
def Open_batchimage(dir,filename):
global batch_Multiimage,batch_Multigray,batch_Multitype,batch_Multiimagebands,batch_Multigraybands
try:
Filersc=cv2.imread(dir+'/'+filename,flags=cv2.IMREAD_ANYCOLOR)
height,width,channel=np.shape(Filersc)
Filesize=(height,width)
print('filesize:',height,width)
RGBfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2RGB)
Grayfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2Lab)
Grayfile=cv2.cvtColor(Grayfile,cv2.COLOR_BGR2GRAY)
Grayimg=batch_img(Filesize,Grayfile)
RGBbands=np.zeros((channel,height,width))
for j in range(channel):
band=RGBfile[:,:,j]
band=np.where(band==0,1e-6,band)
RGBbands[j,:,:]=band
RGBimg=batch_img(Filesize,RGBbands)
tempdict={filename:RGBimg}
batch_Multiimagebands.update(tempdict)
tempdict={filename:Grayfile}
batch_Multigray.update(tempdict)
tempdict={filename:0}
batch_Multitype.update(tempdict)
tempdict={filename:Grayimg}
batch_Multigraybands.update(tempdict)
# batch_filenames.append(filename)
except:
# messagebox.showerror('Invalid Image Format','Cannot open '+filename)
return False
return True
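# Open_batchfile: parses the batch settings file exported by the GUI. One
# "name,value[,value,...]" pair per line, in this order:
# 0 PCweight, 1 PCsel, 2 Kmeans, 3 Kmeans_sel list, 4 max area, 5 min area,
# 6 max diagonal, 7 min diagonal, 8 reference nonzero ratio.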
def Open_batchfile():
global pcs,pcweight,kmeans,kmeans_sel,maxthres,minthres,maxlw,minlw,std_nonzeroratio
btfile=filedialog.askopenfilename()
if len(btfile)>0:
if '.txt' in btfile:
with open(btfile,mode='r') as f:
setting=f.readlines()
# print(setting)
pcweight=float(setting[0].split(',')[1])
pcs=int(setting[1].split(',')[1])+1
# print(pcs)
# for i in range(len(pcs)):
# pcs[i]=int(pcs[i])
kmeans=setting[2].split(',')[1]
kmeans=int(kmeans)
kmeans_sel=setting[3].split(',')[1:-1]
maxthres=setting[4].split(',')[1]
try:
maxthres=float(maxthres)
except:
messagebox.showerror('Load Max area error','No Max area threshold value.')
return
minthres=setting[5].split(',')[1]
minthres=float(minthres)
maxlw=setting[6].split(',')[1]
maxlw=float(maxlw)
minlw=setting[7].split(',')[1]
minlw=float(minlw)
std_nonzeroratio=float(setting[8].split(',')[1])
for i in range(len(kmeans_sel)):
kmeans_sel[i]=int(kmeans_sel[i])
print('PCweight',pcweight,'PCsel',pcs,'KMeans',kmeans,'KMeans-Selection',kmeans_sel)
print('maxthres',maxthres,'minthres',minthres,'maxlw',maxlw,'minlw',minlw)
messagebox.showinfo('Batch settings','PCweight='+str(pcweight)+'\nPCsel='+str(pcs)+'\nKMeans='+str(kmeans)+
'\nCluster selection'+str(kmeans_sel)+'\nMax area='+str(maxthres)+
'\nMin area='+str(minthres)+'\nMax diagonal='+str(maxlw)+'\nMin diagonal='+
str(minlw))
def Open_batchfolder():
# global batch_filenames,batch_Multiimage,batch_Multigray,batch_Multitype,batch_Multiimagebands,batch_Multigraybands
# global batch_displaybandarray,batch_originbandarray,batch_originpcabands
# global pcs,kmeans,kmeans_sel
# global batch_results
global batch_filenames
global FOLDER
batch_filenames=[]
# batch_Multiimage={}
# batch_Multigray={}
# batch_Multitype={}
# batch_Multiimagebands={}
# batch_Multigraybands={}
#
# batch_displaybandarray={}
# batch_originbandarray={}
# batch_originpcabands={}
#
# batch_results={}
# pcs=0
# kmeans=0
# kmeans_sel=0
FOLDER=filedialog.askdirectory()
if len(FOLDER)>0:
print(FOLDER)
# for root, dirs,files in os.walk(FOLDER):
files=os.listdir(FOLDER)
for filename in files:
# print('root',root)
# print('dirs',dirs)
# print("filename",filename)
batch_filenames.append(filename)
# batch_filenames.append(filename)
# Open_batchimage(FOLDER,filename)
# batch_singleband(filename)
# messagebox.showinfo('Finish loading','Loading Image finished')
# if len(batch_filenames)==0:
# messagebox.showerror('No file','No file under current folder')
# return
batch_filenames.sort()
print('filenames',batch_filenames)
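# The module-level batch_* functions below appear to be earlier, file-keyed
# versions of the batch_ser_func pipeline methods above; batch_process now
# drives the class-based pipeline instead.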
def batch_kmeansclassify(file):
if kmeans==0:
messagebox.showerror('Kmeans error','Kmeans should be greater than 0')
return
originpcabands=batch_displaybandarray[file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
print(file,'originpcabands',pcah,pcaw,pcac)
pcakeys=pcs
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
if kmeans==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave(file+'_k=1.png',displaylabels)
else:
#tempband=displaybandarray[currentfilename]['LabOstu']
if kmeans>1:
h,w,c=tempband.shape
print('shape',tempband.shape)
reshapedtif=tempband.reshape(tempband.shape[0]*tempband.shape[1],c)
print('reshape',reshapedtif.shape)
clf=KMeans(n_clusters=kmeans,init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(reshapedtif)
# print('label=0',np.any(tempdisplayimg==0))
displaylabels=tempdisplayimg.labels_.reshape((batch_displaybandarray[file]['LabOstu'].shape[0],
batch_displaybandarray[file]['LabOstu'].shape[1]))
return displaylabels
def batch_generateimgplant(displaylabels,file):
colordicesband=np.copy(displaylabels)
tempdisplayimg=np.zeros((batch_displaybandarray[file]['LabOstu'].shape[0],
batch_displaybandarray[file]['LabOstu'].shape[1]))
colordivimg=np.zeros((batch_displaybandarray[file]['LabOstu'].shape[0],
batch_displaybandarray[file]['LabOstu'].shape[1]))
for i in range(len(kmeans_sel)):
sk=kmeans_sel[i]-1
tempdisplayimg=np.where(displaylabels==sk,1,tempdisplayimg)
currentlabels=np.copy(tempdisplayimg)
originbinaryimg=np.copy(tempdisplayimg)
tempcolorimg=np.copy(displaylabels).astype('float32')
ratio=batch_findratio([tempdisplayimg.shape[0],tempdisplayimg.shape[1]],[850,850])
if tempdisplayimg.shape[0]*tempdisplayimg.shape[1]<850*850:
tempdisplayimg=cv2.resize(tempdisplayimg,(int(tempdisplayimg.shape[1]*ratio),int(tempdisplayimg.shape[0]*ratio)))
colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]*ratio),int(colordivimg.shape[0]*ratio)))
else:
tempdisplayimg=cv2.resize(tempdisplayimg,(int(tempdisplayimg.shape[1]/ratio),int(tempdisplayimg.shape[0]/ratio)))
colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]/ratio),int(colordivimg.shape[0]/ratio)))
binaryimg=np.zeros((tempdisplayimg.shape[0],tempdisplayimg.shape[1],3))
colordeimg=np.zeros((colordivimg.shape[0],colordivimg.shape[1],3))
locs=np.where(tempdisplayimg==1)
binaryimg[locs]=[240,228,66]
for i in range(kmeans):
locs=np.where(colordivimg==i)
colordeimg[locs]=batch_colorbandtable[i]
Image.fromarray(colordeimg.astype('uint8')).save(file+'-allcolorindex.png',"PNG")
Image.fromarray((binaryimg.astype('uint8'))).save(file+'-binaryimg.png',"PNG")
return currentlabels,originbinaryimg
def batch_extraction(currentlabels,file):
global batch_results
if kmeans==1:
messagebox.showerror('Invalid Class #',message='#Class = 1, try changing it to 2 or more, and refresh Color-Index.')
return
nonzeros=np.count_nonzero(currentlabels)
print('nonzero counts',nonzeros)
nonzeroloc=np.where(currentlabels!=0)
try:
ulx,uly=min(nonzeroloc[1]),min(nonzeroloc[0])
except:
messagebox.showerror('Invalid Colorindices',message='Need to process color indices first')
return
rlx,rly=max(nonzeroloc[1]),max(nonzeroloc[0])
nonzeroratio=float(nonzeros)/((rlx-ulx)*(rly-uly))
print(nonzeroratio)
dealpixel=nonzeroratio*currentlabels.shape[0]*currentlabels.shape[1]
ratio=1
if nonzeroratio<=0.2:# and nonzeroratio>=0.1:
ratio=batch_findratio([currentlabels.shape[0],currentlabels.shape[1]],[1600,1600])
if currentlabels.shape[0]*currentlabels.shape[1]>1600*1600:
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/ratio),int(currentlabels.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
else:
#ratio=1
#print('nonzeroratio',ratio)
workingimg=np.copy(currentlabels)
segmentratio=0
else:
print('deal pixel',dealpixel)
if dealpixel>512000:
if currentlabels.shape[0]*currentlabels.shape[1]>850*850:
segmentratio=batch_findratio([currentlabels.shape[0],currentlabels.shape[1]],[850,850])
if segmentratio<2:
segmentratio=2
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/segmentratio),int(currentlabels.shape[0]/segmentratio)),interpolation=cv2.INTER_LINEAR)
else:
segmentratio=1
#print('ratio',ratio)
workingimg=np.copy(currentlabels)
pixelmmratio=1.0
coin=False
print('nonzeroratio:',ratio,'segmentation ratio',segmentratio)
print('workingimgsize:',workingimg.shape)
pyplt.imsave('workingimg.png',workingimg)
originlabels=None
if originlabels is None:
originlabels,border,colortable,originlabeldict=tkintercorestat.init(workingimg,workingimg,'',workingimg,10,coin)
batch_results.update({file:(originlabeldict,{})})
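# batch_proc_func: per-file worker for the (commented-out) multiprocessing
# path; checks that the image folder, output folder, and batch settings are
# all loaded before running the pipeline.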
def batch_proc_func(file):
if len(FOLDER)==0:
messagebox.showerror('No image folder','Need to assign image folder')
return
if len(exportpath)==0:
messagebox.showerror('No output folder','Need to assign output folder')
return
if pcs==0:  # pcs is an int here; len(pcs) would raise TypeError
messagebox.showerror('No batch file','Need to load batch file')
return
procobj=batch_ser_func(file)
procobj.process()
del procobj
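# batch_process: serial driver that runs the full pipeline for every loaded
# file; a multiprocessing.Pool variant is kept commented out below the loop.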
def batch_process():
global batch_colordicesband
global batch_Multiimage,batch_Multigray,batch_Multitype,batch_Multiimagebands,batch_Multigraybands
global batch_displaybandarray,batch_originbandarray,batch_originpcabands
global batch_results
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
cpunum=multiprocessing.cpu_count()
print('# of CPUs',cpunum)
starttime=time.time()
print('start time',starttime)
for file in batch_filenames:
# batch_Multiimage={}
# batch_Multigray={}
# batch_Multitype={}
# batch_Multiimagebands={}
# batch_Multigraybands={}
#
# batch_displaybandarray={}
# batch_originbandarray={}
# batch_originpcabands={}
# batch_colordicesband={}
#
# batch_results={}
# if Open_batchimage(FOLDER,file)==False:
# continue
# batch_singleband(file)
# colordicesband=batch_kmeansclassify(file)
# batch_colordicesband.update({file:colordicesband})
# currentlabels,originbinaryimg=batch_generateimgplant(colordicesband,file)
# batch_extraction(currentlabels,file)
# batch_export_result(exportpath,file)
procobj=batch_ser_func(file)
procobj.process()
del procobj
# multi_pool=multiprocessing.Pool(int(cpunum/4))
# multi_pool.map(batch_proc_func,batch_filenames)
print('used time',time.time()-starttime)
messagebox.showinfo('Done','Batch process ends!')
def batch_showcounting(tup,number=True,frame=True,header=True,whext=False,blkext=False):
labels=tup[0]
colortable=tup[2]
coinparts=tup[3]
filename=tup[4]
uniquelabels=list(colortable.keys())
imgrsc=cv2.imread(FOLDER+'/'+filename,flags=cv2.IMREAD_ANYCOLOR)
imgrsc=cv2.cvtColor(imgrsc,cv2.COLOR_BGR2RGB)
imgrsc=cv2.resize(imgrsc,(labels.shape[1],labels.shape[0]),interpolation=cv2.INTER_LINEAR)
image=Image.fromarray(imgrsc)
if whext==True:
# blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg[:,:,:]=[255,255,255]
itemlocs=np.where(labels!=0)
# blkbkg[itemlocs]=imgrsc[itemlocs]
whbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(whbkg.astype('uint8'))
if blkext==True:
blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
itemlocs=np.where(labels!=0)
blkbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(blkbkg.astype('uint8'))
print('showcounting_resize',image.size)
image.save('beforlabel.gif',append_images=[image])
draw=ImageDraw.Draw(image)
sizeuniq,sizecounts=np.unique(labels,return_counts=True)
minsize=min(sizecounts)
suggsize=int(minsize**0.5)
if suggsize>22:
suggsize=22
if suggsize<14:
suggsize=14
font=ImageFont.truetype('cmb10.ttf',size=suggsize)
for uni in uniquelabels:
if uni!=0:
pixelloc = np.where(labels == uni)
try:
ulx = min(pixelloc[1])
except:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
print(ulx, uly, rlx, rly)
if frame==True:
draw.polygon([(ulx,uly),(rlx,uly),(rlx,rly),(ulx,rly)],outline='red')
if number==True:
if uni in colortable:
canvastext = str(colortable[uni])
else:
canvastext = 'No label'
# if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=font, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=font,fill='black')
if header==True:
content='item count:'+str(len(uniquelabels))+'\n File: '+filename
contentlength=len(content)+50
#rectext=canvas.create_text(10,10,fill='black',font='Times 16',text=content,anchor=NW)
draw.text((10-1, 10+1), text=content, font=font, fill='white')
draw.text((10+1, 10+1), text=content, font=font, fill='white')
draw.text((10-1, 10-1), text=content, font=font, fill='white')
draw.text((10+1, 10-1), text=content, font=font, fill='white')
#draw.text((10,10),text=content,font=font,fill=(141,2,31,0))
draw.text((10,10),text=content,font=font,fill='black')
#image.save(originfile+'-countresult'+extension,"JPEG")
#firstimg=Multigraybands[currentfilename]
#height,width=firstimg.size
height,width,channel=batch_displaybandarray[filename]['LabOstu'].shape
ratio=batch_findratio([height,width],[850,850])
#if labels.shape[0]*labels.shape[1]<850*850:
# disimage=image.resize([int(labels.shape[1]*ratio),int(labels.shape[0]*ratio)],resample=Image.BILINEAR)
#else:
# disimage=image.resize([int(labels.shape[1]/ratio),int(labels.shape[0]/ratio)],resample=Image.BILINEAR)
print('show counting ratio',ratio)
if height*width<850*850:
print('showcounting small')
disimage=image.resize([int(width*ratio),int(height*ratio)],resample=Image.BILINEAR)
else:
print('showcounting big')
disimage=image.resize([int(width/ratio),int(height/ratio)],resample=Image.BILINEAR)
print('showcounting shape',disimage.size)
displayoutput=ImageTk.PhotoImage(disimage)
disimage.save('output.gif',append_images=[disimage])
#image.save('originoutput.gif',append_images=[image])
return displayoutput,image,disimage
def batch_savePCAimg(path,originfile,file):
originpcabands=batch_displaybandarray[file]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcakeys=pcs
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
displaylabels=np.mean(tempband,axis=2)
# generateimgplant(displaylabels)
# grayimg=(((displaylabels-displaylabels.min())/(displaylabels.max()-displaylabels.min()))*255.9).astype(np.uint8)
# pyplt.imsave('k=1.png',displaylabels.astype('uint8'))
# pyplt.imsave('k=1.png',grayimg)
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
originheight,originwidth=batch_Multigraybands[file].size
origingray=grayimg.resize([originwidth,originheight],resample=Image.BILINEAR)
origingray.save(path+'/'+originfile+'-PCAimg.png',"PNG")
# addcolorstrip()
return
def batch_export_ext(path,file,whext=False,blkext=False):
global kernersizes
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
suggsize=8
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
# kernersizes={}
# for file in batch_filenames:
labeldict=batch_results[file][0]
itervalue='iter0'
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
colortable=labeldict[itervalue]['colortable']
head_tail=os.path.split(file)
originfile,extension=os.path.splitext(head_tail[1])
if len(path)>0:
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=batch_showcounting(tup,False,True,True,whext,blkext)
imageband=segimg
draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
tempdict={}
pixelmmratio=1.0
print('pixelmmratio',pixelmmratio)
if file not in kernersizes:
for uni in uniquelabels:
if uni !=0:
pixelloc = np.where(labels == float(uni))
try:
ulx = min(pixelloc[1])
except:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
print(ulx, uly, rlx, rly)
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
length={}
currborder=tkintercore.get_boundaryloc(labels,uni)
# print('currborder',currborder)
print('currborder length',len(currborder[0])*len(currborder[1]))
pixperc=float(len(pixelloc[0])/(labels.shape[0]*labels.shape[1]))
print('pix length percentage',pixperc)
if pixperc>0.06:
x0=ulx
y0=uly
x1=rlx
y1=rly
kernellength=float(((x0-x1)**2+(y0-y1)**2)**0.5)
else:
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
templength=float(((currborder[0][i]-currborder[0][j])**2+(currborder[1][i]-currborder[1][j])**2)**0.5)
length.update({(i,j):templength})
sortedlength=sorted(length,key=length.get,reverse=True)
try:
topcouple=sortedlength[0]
except:
continue
kernellength=length[topcouple]
i=topcouple[0]
j=topcouple[1]
x0=currborder[1][i]
y0=currborder[0][i]
x1=currborder[1][j]
y1=currborder[0][j]
#slope=float((y0-y1)/(x0-x1))
linepoints=[(currborder[1][i],currborder[0][i]),(currborder[1][j],currborder[0][j])]
#draw.line(linepoints,fill='yellow')
#points=linepixels(currborder[1][i],currborder[0][i],currborder[1][j],currborder[0][j])
lengthpoints=cal_kernelsize.bresenhamline(x0,y0,x1,y1) #x0,y0,x1,y1
for point in lengthpoints:
# if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='yellow')
tengentaddpoints=cal_kernelsize.tengentadd(x0,y0,x1,y1,rlx,rly,labels,uni) #find tangent line above
#for point in tengentaddpoints:
#if int(point[0])>=ulx and int(point[0])<=rlx and int(point[1])>=uly and int(point[1])<=rly:
# draw.point([int(point[0]),int(point[1])],fill='green')
tengentsubpoints=cal_kernelsize.tengentsub(x0,y0,x1,y1,ulx,uly,labels,uni) #find tangent line below
#for point in tengentsubpoints:
# draw.point([int(point[0]),int(point[1])],fill='green')
pointmatchdict={}
for i in range(len(tengentaddpoints)): #find the pixel pair with shortest distance
width=kernellength
pointmatch=[]
point=tengentaddpoints[i]
try:
templabel=labels[int(point[1]),int(point[0])]
except:
continue
if templabel==uni:
for j in range(len(tengentsubpoints)):
subpoint=tengentsubpoints[j]
tempwidth=float(((point[0]-subpoint[0])**2+(point[1]-subpoint[1])**2)**0.5)
if tempwidth<width:
pointmatch[:]=[]
pointmatch.append(point)
pointmatch.append(subpoint)
#print('tempwidth',width)
width=tempwidth
if len(pointmatch)>0:
#print('pointmatch',pointmatch)
pointmatchdict.update({(pointmatch[0],pointmatch[1]):width})
widthsort=sorted(pointmatchdict,key=pointmatchdict.get,reverse=True)
try:
pointmatch=widthsort[0]
print('final pointmatch',pointmatch)
except:
continue
if len(pointmatch)>0:
x0=int(pointmatch[0][0])
y0=int(pointmatch[0][1])
x1=int(pointmatch[1][0])
y1=int(pointmatch[1][1])
# if imgtypevar.get()=='0':
draw.line([(x0,y0),(x1,y1)],fill='yellow')
width=float(((x0-x1)**2+(y0-y1)**2)**0.5)
print('width',width,'length',kernellength)
print('kernelwidth='+str(width*pixelmmratio))
print('kernellength='+str(kernellength*pixelmmratio))
#print('kernelwidth='+str(kernelwidth*pixelmmratio))
tempdict.update({uni:[kernellength,width,pixelmmratio**2*len(pixelloc[0]),kernellength*pixelmmratio,width*pixelmmratio]})
if uni in colortable:
canvastext = str(colortable[uni])
else:
canvastext = 'No label'
# if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=smallfont, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=smallfont,fill='black')
#print(event.x, event.y, labels[event.x, event.y], ulx, uly, rlx, rly)
#recborder = canvas.create_rectangle(ulx, uly, rlx, rly, outline='red')
#drawcontents.append(recborder)
kernersizes.update({file:tempdict})
originheight,originwidth=batch_Multigraybands[file].size
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
extcolor=""
if whext==True:
extcolor= "-extwht"
if blkext==True:
extcolor="-extblk"
image.save(path+'/'+originfile+extcolor+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=batch_showcounting(tup,False,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=batch_showcounting(tup,True,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-labelresult'+'.png',"PNG")
def batch_export_result(path,file):
global kernersizes
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
suggsize=8
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
kernersizes={}
# path=filedialog.askdirectory()
batch_export_ext(path,file,True,False)
batch_export_ext(path,file,False,True)
# for file in batch_filenames:
labeldict=batch_results[file][0]
itervalue='iter0'
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
colortable=labeldict[itervalue]['colortable']
head_tail=os.path.split(file)
originfile,extension=os.path.splitext(head_tail[1])
if len(path)>0:
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=batch_showcounting(tup,False)
#imageband=outputimgbands[file][itervalue]
imageband=segimg
draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
# tempdict={}
pixelmmratio=1.0
#print('coinsize',coinsize.get(),'pixelmmratio',pixelmmratio)
print('pixelmmratio',pixelmmratio)
# (A commented-out duplicate of the per-item kernel-size measurement loop
# used to live here; the live copy runs in batch_export_ext above.)
originheight,originwidth=batch_Multigraybands[file].size
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
image.save(path+'/'+originfile+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],file)
_band,segimg,small_segimg=batch_showcounting(tup,False)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=batch_showcounting(tup,True)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+'-labelresult'+'.png',"PNG")
originrestoredband=np.copy(labels)
restoredband=originrestoredband.astype('uint8')
colordicesband=batch_colordicesband[file]
colordiv=np.zeros((colordicesband.shape[0],colordicesband.shape[1],3))
batch_savePCAimg(path,originfile,file)
# kvar=int(kmeans.get())
# print('kvar',kvar)
# for i in range(kvar):
# locs=np.where(colordicesband==i)
# colordiv[locs]=colorbandtable[i]
# colordivimg=Image.fromarray(colordiv.astype('uint8'))
# colordivimg.save(path+'/'+originfile+'-colordevice'+'.jpeg',"JPEG")
colordivimg=Image.open(file+'-allcolorindex.png')
copycolordiv=colordivimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copycolordiv.save(path+'/'+originfile+'-colordevice'+'.png',"PNG")
#pyplt.imsave(path+'/'+originfile+'-colordevice'+'.png',colordiv.astype('uint8'))
# copybinary=np.zeros((originbinaryimg.shape[0],originbinaryimg.shape[1],3),dtype='float')
# nonzeros=np.where(originbinaryimg==1)
# copybinary[nonzeros]=[255,255,0]
# binaryimg=Image.fromarray(copybinary.astype('uint8'))
binaryimg=Image.open(file+'-binaryimg.png')
copybinaryimg=binaryimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copybinaryimg.save(path+'/'+originfile+'-binaryimg'+'.png',"PNG")
# pyplt.imsave(path+'/'+originfile+'-binaryimg'+'.png',originbinaryimg.astype('uint8'))
#restoredband=cv2.resize(src=restoredband,dsize=(originwidth,originheight),interpolation=cv2.INTER_LINEAR)
print(restoredband.shape)
currentsizes=kernersizes[file]
indicekeys=list(batch_originbandarray[file].keys())
indeclist=[ 0 for i in range(len(indicekeys)*3)]
pcalist=[0 for i in range(3)]
temppcabands=np.zeros((batch_originpcabands[file].shape[0],len(pcs)))
for i in range(len(pcs)):
temppcabands[:,i]=temppcabands[:,i]+batch_originpcabands[file][:,pcs[i]-1]
pcabands=np.mean(temppcabands,axis=1)
pcabands=pcabands.reshape((originheight,originwidth))
datatable={}
origindata={}
for key in indicekeys:
data=batch_originbandarray[file][key]
data=data.tolist()
tempdict={key:data}
origindata.update(tempdict)
print(key)
# for uni in colortable:
print(uniquelabels)
print('len uniquelabels',len(uniquelabels))
for uni in uniquelabels:
print(uni,colortable[uni])
uniloc=np.where(labels==float(uni))
if len(uniloc[0])==0 or len(uniloc[1])==0:
print('no uniloc\n')
print(uniloc[0],uniloc[1])
continue
smalluniloc=np.where(originrestoredband==uni)
ulx,uly=min(smalluniloc[1]),min(smalluniloc[0])
rlx,rly=max(smalluniloc[1]),max(smalluniloc[0])
width=rlx-ulx
length=rly-uly
print(width,length)
subarea=restoredband[uly:rly+1,ulx:rlx+1]
subarea=subarea.tolist()
amount=len(uniloc[0])
print(amount)
try:
sizes=currentsizes[uni]
except:
print('no sizes\n')
continue
#templist=[amount,length,width]
templist=[amount,sizes[0],sizes[1],sizes[2],sizes[3],sizes[4]]
tempdict={colortable[uni]:templist+indeclist+pcalist} #NIR,Red Edge,R,G,B,NDVI,area
print(tempdict)
for ki in range(len(indicekeys)):
originNDVI=origindata[indicekeys[ki]]
print(len(originNDVI),len(originNDVI[0]))
pixellist=[]
for k in range(len(uniloc[0])):
#print(uniloc[0][k],uniloc[1][k])
try:
tempdict[colortable[uni]][6+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
tempdict[colortable[uni]][7+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
pixellist.append(originNDVI[uniloc[0][k]][uniloc[1][k]])
tempdict[colortable[uni]][ki*3+6]=tempdict[colortable[uni]][ki*3+6]/amount
tempdict[colortable[uni]][ki*3+8]=np.std(pixellist)
pixellist=[]
for k in range(len(uniloc[0])):
try:
tempdict[colortable[uni]][-2]+=pcabands[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
tempdict[colortable[uni]][-3]+=pcabands[uniloc[0][k]][uniloc[1][k]]
pixellist.append(pcabands[uniloc[0][k]][uniloc[1][k]])
tempdict[colortable[uni]][-3]=tempdict[colortable[uni]][-3]/amount
tempdict[colortable[uni]][-1]=np.std(pixellist)
datatable.update(tempdict)
filename=path+'/'+originfile+'-outputdata.csv'
with open(filename,mode='w') as f:
csvwriter=csv.writer(f)
rowcontent=['Index','Plot','Area(#pixel)','Length(#pixel)','Width(#pixel)','Area(mm2)','Length(mm)','Width(mm)']
for key in indicekeys:
rowcontent.append('avg-'+str(key))
rowcontent.append('sum-'+str(key))
rowcontent.append('std-'+str(key))
rowcontent.append('avg-PCA')
rowcontent.append('sum-PCA')
rowcontent.append('std-PCA')
#csvwriter.writerow(['ID','NIR','Red Edge','Red','Green','Blue','NIRv.s.Green','LabOstu','area(#of pixel)'])
#csvwriter.writerow(['Index','Plot','Area(#pixels)','avg-NDVI','sum-NDVI','std-NDVI','Length(#pixel)','Width(#pixel)'])#,'#holes'])
csvwriter.writerow(rowcontent)
i=1
for uni in datatable:
row=[i,uni]
for j in range(len(datatable[uni])):
row.append(datatable[uni][j])
#row=[i,uni,datatable[uni][0],datatable[uni][1],datatable[uni][2],datatable[uni][5],datatable[uni][3],datatable[uni][4]]#,
#datatable[uni][5]]
i+=1
print(row)
csvwriter.writerow(row)
print('total data length=',len(datatable))
# messagebox.showinfo('Saved',message='Results are saved to '+path)
# tx=root.winfo_x()
# ty=root.winfo_y()
# top=Toplevel()
# top.attributes("-topmost",True)
# w = 300
# h = 150
# dx=100
# dy=100
# top.geometry("%dx%d+%d+%d" % (w, h, tx + dx, ty + dy))
# top.title('Saved')
# Message(top,text='Results are saved to '+path,padx=20,pady=20).pack()
# okbut=Button(top,text='Okay',command=top.destroy)
# okbut.pack(side=BOTTOM)
# top.after(10000,top.destroy)
# batchfile=path+'/'+originfile+'-batch'+'.txt'
# with open(batchfile,'w') as f:
# for key in batch.keys():
# f.write(key)
# f.write(',')
# for i in range(len(batch[key])):
# f.write(str(batch[key][i]))
# f.write(',')
# f.write('\n')
# f.close()
def batch_exportpath():
global exportpath
exportpath=filedialog.askdirectory()
while len(exportpath)==0:
exportpath=filedialog.askdirectory()
```
#### File: 12HuYang/GridFree/histograms.py
```python
import tkinter
import axistest
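# plot: draws a bar histogram on an existing Tk canvas inside a fixed 400x350
# layout (bars start at x=25, baseline at y=325), then lets axistest.drawPlot
# render the axes. Returns the left and right x bounds of the bar region.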
def plot ( data,hist,bin_edges,canvas ) :
numberOfBins = len ( data )
#root = tkinter.Tk ( )
#width , height = 400 , 350
#canvas = tkinter.Canvas ( root , width = width , height = height )
#canvas.pack ( )
#numberOfStripes = 2 * numberOfBins + 1
numberOfStripes = numberOfBins + 1
barWidth = (400-50) / (numberOfStripes)
unitHeight = 300 / ( max ( [ datum[1] for datum in data ] ) )
lastx=0
for i in range ( numberOfBins ) :
#ulx=( 2 * i + 1 ) * barWidth
if i==0:
#ulx=(i+1)*barWidth
ulx=25
#rlx=(i+2)*barWidth
rlx=25+barWidth
else:
ulx=lastx
rlx=lastx+barWidth
#uly=height - unitHeight -12
uly=325
#rlx=( 2 * i + 2 ) * barWidth
#rlx=(i+2)*(2*barWidth)
lastx=rlx
rly=325 - ( data[i][1] ) * unitHeight
print(ulx,uly,rlx,rly)
canvas.create_rectangle (ulx,uly,rlx,rly,
fill = 'blue' )
axistest.drawPlot(25,uly,25+numberOfBins*barWidth,325-(max([datum[1] for datum in data]))*unitHeight,hist,bin_edges,canvas)
return 25,25+numberOfBins*barWidth
#root.mainloop ( )
if __name__ == '__main__' :
    # demo fixed to match the signature plot ( data , hist , bin_edges , canvas ) ;
    # the hist/bin_edges values below are illustrative
    root = tkinter.Tk ( )
    canvas = tkinter.Canvas ( root , width = 400 , height = 350 )
    canvas.pack ( )
    plot ( [
        [ '1--2' , 1 ] ,
        [ '2--3' , 3 ] ,
        [ '3--4' , 1 ]
    ] , [ 1 , 3 , 1 ] , [ 1 , 2 , 3 , 4 ] , canvas )
    root.mainloop ( )
```
#### File: 12HuYang/GridFree/tkinterGUI_nw.py
```python
from tkinter import *
from tkinter import ttk
import tkinter.filedialog as filedialog
from tkinter import messagebox
from PIL import Image,ImageDraw,ImageFont
from PIL import ImageTk,ImageGrab
import cv2
from skimage import filters
#import rasterio
import matplotlib.pyplot as pyplt
#from matplotlib.figure import Figure
import numpy as np
import os
#import time
import csv
import scipy.linalg as la
from functools import partial
#import threading
#import sys
#import kplus
from sklearn.cluster import KMeans
import tkintercorestat
#import tkintercorestat_plot
import tkintercore
import cal_kernelsize
#import histograms
#import createBins
import axistest
#from multiprocessing import Pool
import lm_method
#import batchprocess
import sel_area
class img():
def __init__(self,size,bands):
self.size=size
self.bands=bands
import batchprocess
displayimg={'Origin':None,
'PCs':None,
'Color Deviation':None,
'ColorIndices':None,
'Output':None}
previewimg={'Color Deviation':None,
'ColorIndices':None}
#cluster=['LabOstu','NDI'] #,'Greenness','VEG','CIVE','MExG','NDVI','NGRDI','HEIGHT']
#cluster=['LabOstu','NDI','Greenness','VEG','CIVE','MExG','NDVI','NGRDI','HEIGHT','Band1','Band2','Band3']
cluster=['PAT_R','PAT_G','PAT_B',
'DIF_R','DIF_G','DIF_B',
'ROO_R','ROO_G','ROO_B',
'GLD_R','GLD_G','GLD_B',
'Band1','Band2','Band3']
colorbandtable=np.array([[255,0,0],[255,127,0],[255,255,0],[127,255,0],[0,255,255],[0,127,255],[0,0,255],[127,0,255],[75,0,130],[255,0,255]],'uint8')
#print('colortableshape',colortable.shape)
filenames=[]
Multiimage={}
Multigray={}
Multitype={}
Multiimagebands={}
Multigraybands={}
workbandarray={}
displaybandarray={}
originbandarray={}
colorindicearray={}
clusterdisplay={}
kernersizes={}
multi_results={}
outputimgdict={}
outputimgbands={}
outputsegbands={}
originsegbands={}
oldpcachoice=[]
multiselectitems=[]
coinbox_list=[]
pre_checkbox=[]
originpcabands={}
batch={'PCweight':[],
'PCsel':[],
'Kmeans':[],
'Kmeans_sel':[],
'Area_max':[],
'Area_min':[],
'shape_max':[],
'shape_min':[],
'nonzero':[]}
root=Tk()
root.title('GridFree v.1.1.0 ')
root.geometry("")
root.option_add('*tearoff',False)
emptymenu=Menu(root)
root.config(menu=emptymenu)
screenheight=root.winfo_screenheight()
screenwidth=root.winfo_screenwidth()
print('screenheight',screenheight,'screenwidth',screenwidth)
screenstd=min(screenheight-100,screenwidth-100,850)
coinsize=StringVar()
selarea=StringVar()
refvar=StringVar()
imgtypevar=StringVar()
edge=StringVar()
kmeans=IntVar()
pc_combine_up=DoubleVar()
pc_combine_down=IntVar()
filedropvar=StringVar()
displaybut_var=StringVar()
buttonvar=IntVar()
bandchoice={}
checkboxdict={}
#minipixelareaclass=0
coinbox=None
currentfilename=''
currentlabels=None
displaylabels=None
workingimg=None
displaypclabels=None
boundaryarea=None
outputbutton=None
font=None
reseglabels=None
coindict=None
## Functions
refarea=None
originlabels=None
originlabeldict=None
changekmeans=False
convband=None
reflabel=0
minflash=[]
dotflash=[]
labelplotmap={}
mappath=''
elesize=[]
labellist=[]
figdotlist={}
havecolorstrip=True
kmeanschanged=False
pcweightchanged=False
originbinaryimg=None
clusterchanged=False
originselarea=False
zoomoff=False
maxx=0
minx=0
bins=None
loccanvas=None
linelocs=[0,0,0,0]
maxy=0
miny=0
segmentratio=0
zoombox=[]
displayfea_l=0
displayfea_w=0
resizeshape=[]
previewshape=[]
pcbuttons=[]
pcbuttonsgroup=[]
def distance(p1,p2):
return np.sum((p1-p2)**2)
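# distance() returns the squared Euclidean distance; skipping the square root
# is fine wherever only relative comparisons between distances are needed.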
def findratio(originsize,objectsize):
oria=originsize[0]
orib=originsize[1]
obja=objectsize[0]
objb=objectsize[1]
if oria>obja or orib>objb:
ratio=round(max((oria/obja),(orib/objb)))
else:
ratio=round(min((obja/oria),(objb/orib)))
# if oria*orib>850 * 850:
if oria*orib>screenstd * screenstd:
if ratio<2:
ratio=2
return ratio
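# Worked example (hypothetical numbers): an 8000x6000 image shown in a
# 2000x2000 box gives ratio=round(max(8000/2000,6000/2000))=4, i.e. a 4x
# shrink; for images larger than screenstd*screenstd the ratio is clamped
# to at least 2.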
def getkeys(dict):
return [*dict]
def deletezoom(event,widget):
print('leave widget')
if len(zoombox)>0:
for i in range(len(zoombox)):
#print('delete')
widget.delete(zoombox.pop(0))
widget.update()
def zoom(event,widget,img):
global zoombox
x=event.x
y=event.y
#print(x,y)
if len(zoombox)>1:
widget.delete(zoombox.pop(0))
#print('delete')
crop=img.crop((x-15,y-15,x+15,y+15))
w,h=crop.size
#print(w,h)
crop=crop.resize([w*3,h*3],resample=Image.BILINEAR)
w,h=crop.size
crop=ImageTk.PhotoImage(crop)
zoombox.append(widget.create_image(x+5,y-5,image=crop))
root.update_idletasks()
    # raise NameError  # debug leftover: an unconditional raise here would abort every zoom redraw
#time.sleep(0.1)
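# zoom() floats a 3x-magnified copy of the 30x30-pixel window under the cursor
# next to the pointer; deletezoom() removes the overlay when the cursor leaves.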
def changedisplay_pc(frame):
for widget in frame.winfo_children():
widget.pack_forget()
#widget.configure(image=displayimg[text])
#widget.image=displayimg[text]
#widget.pack()
w=displayimg['PCs']['Size'][1]
l=displayimg['PCs']['Size'][0]
widget.config(width=w,height=l)
widget.create_image(0,0,image=displayimg['PCs']['Image'],anchor=NW)
widget.pack()
widget.update()
def pcweightupdate(displayframe):
getPCs()
changedisplay_pc(displayframe)
def buttonpress(val,displayframe,buttonframe):
global buttonvar,pc_combine_up,kmeans
buttonvar.set(val)
kmeans.set(1)
pc_combine_up.set(0.5)
buttonchildren=buttonframe.winfo_children()
for child in buttonchildren:
child.config(highlightbackground='white')
print(buttonchildren[val])
buttonchild=buttonchildren[val]
buttonchild.config(highlightbackground='red')
print('press button ',buttonvar.get())
getPCs()
changedisplay_pc(displayframe)
# if kmeans.get()>1:
changekmeansbar('')
beforecluster('')
# changecluster('')
def PCbuttons(frame,displayframe):
#display pc buttons
# buttonvar=IntVar()
#buttonvar.set(0)
for widget in frame.winfo_children():
widget.pack_forget()
buttonframe=LabelFrame(frame)
buttonframe.pack()
for i in range(len(pcbuttons)):
butimg=pcbuttons[i]
but=Button(buttonframe,text='',image=butimg,compound=TOP,command=partial(buttonpress,i,displayframe,buttonframe))
if i==buttonvar.get():
but.config(highlightbackground='red')
row=int(i/3)
col=i%3
# print(row,col)
        but.grid(row=row,column=col)
print('default button',buttonvar.get())
# change cluster,display
def displaypreview(text):
global figcanvas,resviewframe
for widget in resviewframe.winfo_children():
widget.pack_forget()
# previewframe=Canvas(frame,width=450,height=400,bg='white')
figcanvas.pack()
figcanvas.delete(ALL)
if text=='Color Deviation':
previewtext='ColorIndices'
if text=='ColorIndices':
previewtext='Color Deviation'
previewimage=previewimg[previewtext]['Image']
figcanvas.create_image(0,0,image=previewimage,anchor=NW)
figcanvas.update()
def switchevent(event,widget,img):
global zoomoff,zoomfnid_m,zoomfnid_l,zoombox
zoomoff= not zoomoff
if zoomoff==True:
widget.unbind('<Motion>',zoomfnid_m)
widget.unbind('<Leave>',zoomfnid_l)
if len(zoombox)>0:
for i in range(len(zoombox)):
widget.delete(zoombox.pop(0))
widget.update()
else:
zoomfnid_m=widget.bind('<Motion>',lambda event,arg=widget:zoom(event,arg,img))
zoomfnid_l=widget.bind('<Leave>',lambda event,arg=widget:deletezoom(event,arg))
def changedisplayimg(frame,text):
global displaybut_var,figcanvas,resviewframe,reflabel
displaybut_var.set(disbuttonoption[text])
for widget in frame.winfo_children():
widget.pack_forget()
#widget.configure(image=displayimg[text])
#widget.image=displayimg[text]
#widget.pack()
w=displayimg[text]['Size'][1]
l=displayimg[text]['Size'][0]
widget.config(width=w,height=l)
widget.create_image(0,0,image=displayimg[text]['Image'],anchor=NW)
widget.pack()
widget.update()
global rects,selareapos,app,delapp,delrects,delselarea,originselarea
global zoomfnid_m,zoomfnid_l
app=sel_area.Application(widget)
# delapp=sel_area.Application(widget)
if text=='Output':
try:
image=outputsegbands[currentfilename]['iter0']
displayfig()
except:
return
zoomfnid_m=widget.bind('<Motion>',lambda event,arg=widget:zoom(event,arg,image))
zoomfnid_l=widget.bind('<Leave>',lambda event,arg=widget:deletezoom(event,arg))
delrects=app.start(zoomfnid_m,zoomfnid_l)
widget.bind('<Double-Button-1>',lambda event,arg=widget:switchevent(event,arg,image))
print('delrects',delrects)
else:
reflabel=0
print('reflabel=',reflabel)
try:
delelareadim=app.getinfo(delrects[1])
if delelareadim!=[]:
delselarea=delelareadim
app.end()
except:
pass
if text=='Origin':
try:
image=originsegbands['Origin']
zoomfnid_m=widget.bind('<Motion>',lambda event,arg=widget:zoom(event,arg,image))
zoomfnid_l=widget.bind('<Leave>',lambda event,arg=widget:deletezoom(event,arg))
except:
return
widget.bind('<Double-Button-1>',lambda event,arg=widget:switchevent(event,arg,image))
for widget in resviewframe.winfo_children():
widget.pack_forget()
rects=app.start()
print(rects)
originselarea=True
else:
widget.unbind('<Motion>')
selareadim=app.getinfo(rects[1])
if selareadim!=[]:
selareapos=selareadim
app.end(rects)
if text=='PCs':
selareadim=app.getinfo(rects[1])
if selareadim!=[0,0,1,1] and selareadim!=[] and selareadim!=selareapos:
selareapos=selareadim
if selareapos!=[0,0,1,1] and originselarea==True:
#need to redo PCA
npfilter=np.zeros((displayimg['Origin']['Size'][0],displayimg['Origin']['Size'][1]))
filter=Image.fromarray(npfilter)
draw=ImageDraw.Draw(filter)
draw.ellipse(selareapos,fill='red')
filter=np.array(filter)
filter=np.divide(filter,np.max(filter))
filter=cv2.resize(filter,(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)
partialsingleband(filter)
originselarea=False
pass
PCbuttons(resviewframe,frame)
pass
if text=='Color Deviation':
#displaypreview
displaypreview(text)
pass
if text=='ColorIndices':
#displaypreview
displaypreview(text)
pass
#print('change to '+text)
#time.sleep(1)
def updateresizeshape(shape,content):
shape.append(int(content))
return shape
def generatedisplayimg(filename): # init display images
global resizeshape,previewshape
try:
# firstimg=Multiimagebands[filename]
#height,width=firstimg.size
# height,width,c=displaybandarray[filename]['LabOstu'].shape
bandsize=Multiimagebands[filename].size
if bandsize[0]*bandsize[1]>2000*2000:
ratio=findratio([bandsize[0],bandsize[1]],[2000,2000])
else:
ratio=1
height,width=bandsize[0]/ratio,bandsize[1]/ratio
# ratio=findratio([height,width],[850,850])
ratio=findratio([height,width],[screenstd,screenstd])
print('displayimg ratio',ratio)
resizeshape=[]
# if height*width<850*850:
if height*width<screenstd*screenstd:
#resize=cv2.resize(Multiimage[filename],(int(width*ratio),int(height*ratio)),interpolation=cv2.INTER_LINEAR)
updateresizeshape(resizeshape,width*ratio)
updateresizeshape(resizeshape,height*ratio)
# resizeshape.append(width*ratio)
# resizeshape.append(height*ratio)
if height>screenstd:
resizeshape=[]
ratio=round(height/screenstd)
updateresizeshape(resizeshape,width*ratio)
updateresizeshape(resizeshape,height*ratio)
if width>screenstd:
resizeshape=[]
ratio=round(width/screenstd)
updateresizeshape(resizeshape,width*ratio)
updateresizeshape(resizeshape,height*ratio)
else:
#resize=cv2.resize(Multiimage[filename],(int(width/ratio),int(height/ratio)),interpolation=cv2.INTER_LINEAR)
updateresizeshape(resizeshape,width/ratio)
updateresizeshape(resizeshape,height/ratio)
ratio=findratio([height,width],[400,450])
previewshape=[]
if height*width<450*400:
#resize=cv2.resize(Multiimage[filename],(int(width*ratio),int(height*ratio)),interpolation=cv2.INTER_LINEAR)
updateresizeshape(previewshape,width*ratio)
updateresizeshape(previewshape,height*ratio)
if height>400:
previewshape=[]
                ratio=round(height/400)  # preview box is 400 px tall; screenstd here was a copy-paste slip
updateresizeshape(previewshape,width/ratio)
updateresizeshape(previewshape,height/ratio)
if width>450:
previewshape=[]
                ratio=round(width/450)  # preview box is 450 px wide
updateresizeshape(previewshape,width/ratio)
updateresizeshape(previewshape,height/ratio)
else:
#resize=cv2.resize(Multiimage[filename],(int(width/ratio),int(height/ratio)),interpolation=cv2.INTER_LINEAR)
updateresizeshape(previewshape,width/ratio)
updateresizeshape(previewshape,height/ratio)
resize=cv2.resize(Multiimage[filename],(int(resizeshape[0]),int(resizeshape[1])),interpolation=cv2.INTER_LINEAR)
originimg=Image.fromarray(resize.astype('uint8'))
originsegbands.update({'Origin':originimg})
rgbimg=Image.fromarray(resize.astype('uint8'))
draw=ImageDraw.Draw(rgbimg)
suggsize=14
font=ImageFont.truetype('cmb10.ttf',size=suggsize)
content='\n File: '+filename
draw.text((10-1, 10+1), text=content, font=font, fill='white')
draw.text((10+1, 10+1), text=content, font=font, fill='white')
draw.text((10-1, 10-1), text=content, font=font, fill='white')
draw.text((10+1, 10-1), text=content, font=font, fill='white')
#draw.text((10,10),text=content,font=font,fill=(141,2,31,0))
draw.text((10,10),text=content,font=font,fill='black')
rgbimg=ImageTk.PhotoImage(rgbimg)
tempdict={}
tempdict.update({'Size':resize.shape})
tempdict.update({'Image':rgbimg})
except:
tempdict={}
tempimg=np.zeros((screenstd,screenstd))
tempdict.update({'Size':tempimg.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempimg.astype('uint8')))})
displayimg['Origin']=tempdict
#if height*width<850*850:
# resize=cv2.resize(Multigray[filename],(int(width*ratio),int(height*ratio)),interpolation=cv2.INTER_LINEAR)
#else:
#resize=cv2.resize(Multigray[filename],(int(width/ratio),int(height/ratio)),interpolation=cv2.INTER_LINEAR)
tempimg=np.zeros((screenstd,screenstd))
tempdict={}
try:
tempdict.update({'Size':resize.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(np.zeros((int(resizeshape[1]),int(resizeshape[0]))).astype('uint8')))})
except:
tempdict.update({'Size':tempimg.shape})
#if height*width<850*850:
# tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(np.zeros((int(height*ratio),int(width*ratio))).astype('uint8')))})
#else:
# tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(np.zeros((int(height/ratio),int(width/ratio))).astype('uint8')))})
# tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(np.zeros((int(resizeshape[1]),int(resizeshape[0]))).astype('uint8')))})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempimg.astype('uint8')))})
displayimg['Output']=tempdict
tempdict={}
try:
tempdict.update({'Size':resize.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(np.zeros((int(resizeshape[1]),int(resizeshape[0]))).astype('uint8')))})
except:
tempdict.update({'Size':tempimg.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempimg.astype('uint8')))})
displayimg['PCs']=tempdict
tempdict={}
temppreviewdict={}
temppreviewimg=np.zeros((450,400))
try:
tempband=np.zeros((displaybandarray[filename]['LabOstu'][:,:,0].shape))
# tempband=tempband+displaybandarray[filename]['LabOstu']
# ratio=findratio([tempband.shape[0],tempband.shape[1]],[850,850])
#if tempband.shape[0]*tempband.shape[1]<850*850:
# tempband=cv2.resize(ratio,(int(tempband.shape[1]*ratio),int(tempband.shape[0]*ratio)),interpolation=cv2.INTER_LINEAR)
#else:
# tempband=cv2.resize(ratio,(int(tempband.shape[1]/ratio),int(tempband.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
tempband=cv2.resize(tempband,(int(resizeshape[0]),int(resizeshape[1])),interpolation=cv2.INTER_LINEAR)
tempdict.update({'Size':tempband.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempband[:,:,2].astype('uint8')))})
temppreview=cv2.resize(tempband,(int(previewshape[0]),int(previewshape[1])),interpolation=cv2.INTER_LINEAR)
temppreview=Image.fromarray(temppreview.astype('uint8'))
temppreviewdict.update({'Size':previewshape})
temppreviewdict.update({'Image':ImageTk.PhotoImage(temppreview)})
# print('resizeshape',resizeshape)
#pyplt.imsave('displayimg.png',tempband[:,:,0])
#indimg=cv2.imread('displayimg.png')
except:
tempdict.update({'Size':tempimg.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempimg.astype('uint8')))})
temppreviewdict.update({'Size':temppreviewimg.shape})
temppreviewdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(temppreviewimg.astype('uint8')))})
displayimg['ColorIndices']=tempdict
previewimg['ColorIndices']=temppreviewdict
#resize=cv2.resize(Multigray[filename],(int(resizeshape[0]),int(resizeshape[1])),interpolation=cv2.INTER_LINEAR)
#grayimg=ImageTk.PhotoImage(Image.fromarray(resize.astype('uint8')))
#tempdict={}
#tempdict.update({'Size':resize.shape})
#tempdict.update({'Image':grayimg})
tempdict={}
temppreviewdict={}
try:
colordeviate=np.zeros((tempband[:,:,0].shape[0],tempband[:,:,0].shape[1],3),'uint8')
kvar=int(kmeans.get())
for i in range(kvar):
locs=np.where(tempband[:,:,0]==i)
colordeviate[locs]=colorbandtable[i,:]
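        # paint each k-means cluster id with its entry in colorbandtable so the
        # 'Color Deviation' view shows cluster membership as distinct colors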
# pyplt.imsave('colordeviation.png',colordeviate)
# # colordevimg=Image.fromarray(colordeviate.astype('uint8'))
# # colordevimg.save('colordeviation.png',"PNG")
# testcolor=Image.open('colordeviation.png')
print('colordeviation.png')
# colortempdict={}
colordeviate=cv2.resize(colordeviate,(int(resizeshape[0]),int(resizeshape[1])),interpolation=cv2.INTER_LINEAR)
tempdict.update({'Size':colordeviate.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(colordeviate.astype('uint8')))})
# colortempdict.update({'Size':colordeviate.shape})
# colortempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(colordeviate.astype('uint8')))})
# colortempdict.update({'Image':ImageTk.PhotoImage(testcolor)})
# tempdict={}
temppreview=cv2.resize(colordeviate,(int(previewshape[0]),int(previewshape[1])),interpolation=cv2.INTER_LINEAR)
temppreviewdict.update({'Size':temppreview.shape})
temppreviewdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(temppreview[:,:,0].astype('uint8')))})
except:
tempdict.update({'Size':tempimg.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(tempimg.astype('uint8')))})
temppreviewdict.update({'Size':temppreviewimg.shape})
temppreviewdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(temppreviewimg.astype('uint8')))})
# displayimg['Color Deviation']=colortempdict
displayimg['Color Deviation']=tempdict
previewimg['Color Deviation']=temppreviewdict
def Open_File(filename): #add to multi-image,multi-gray #call band calculation
global Multiimage,Multigray,Multitype,Multiimagebands,Multigraybands,filenames
try:
Filersc=cv2.imread(filename,flags=cv2.IMREAD_ANYCOLOR)
ndim=np.ndim(Filersc)
if ndim==2:
height,width=np.shape(Filersc)
channel=1
Filersc.reshape((height,width,channel))
else:
height,width,channel=np.shape(Filersc)
Filesize=(height,width)
print('filesize:',height,width)
RGBfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2RGB)
Multiimage.update({filename:RGBfile})
if ndim==2:
Grayfile=np.copy(Filersc)
else:
Grayfile=cv2.cvtColor(Filersc,cv2.COLOR_BGR2Lab)
Grayfile=cv2.cvtColor(Grayfile,cv2.COLOR_BGR2GRAY)
#Grayfile=cv2.GaussianBlur(Grayfile,(3,3),cv2.BORDER_DEFAULT)
#ostu=filters.threshold_otsu(Grayfile)
#Grayfile=Grayfile.astype('float32')
#Grayfile=Grayfile/ostu
Grayimg=img(Filesize,Grayfile)
RGBbands=np.zeros((channel,height,width))
for j in range(channel):
band=RGBfile[:,:,j]
band=np.where(band==0,1e-6,band)
nans=np.isnan(band)
band[nans]=1e-6
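            # zeros and NaNs are nudged to 1e-6 so later band-ratio indices
            # (e.g. ROO_R = Red/Green) never divide by zero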
#ostu=filters.threshold_otsu(band)
#band=band/ostu
RGBbands[j,:,:]=band
RGBimg=img(Filesize,RGBbands)
tempdict={filename:RGBimg}
Multiimagebands.update(tempdict)
tempdict={filename:Grayfile}
Multigray.update(tempdict)
tempdict={filename:0}
Multitype.update(tempdict)
tempdict={filename:Grayimg}
Multigraybands.update(tempdict)
except:
messagebox.showerror('Invalid Image Format','Cannot open '+filename)
return False
filenames.append(filename)
return True
def Open_Map():
if proc_mode[proc_name].get()=='1':
batchprocess.Open_batchfile()
return
global mappath,elesize,labellist
filepath=filedialog.askopenfilename()
if len(filepath)>0:
if 'csv' in filepath:
mappath=filepath
elesize=[]
labellist=[]
rows=[]
print('open map at: '+mappath)
with open(mappath,mode='r',encoding='utf-8-sig') as f:
csvreader=csv.reader(f)
for row in csvreader:
rows.append(row)
temprow=[]
for ele in row:
                        if ele != '':  # 'is not' compares identity, not equality
temprow.append(ele)
elesize.append(len(temprow))
for i in range(len(rows)):
for j in range(len(rows[i])):
if rows[i][j]!='':
labellist.append(rows[i][j])
else:
            messagebox.showerror('Invalid File',message='Please open a CSV format file as the map file.')
corlortable=tkintercorestat.get_colortable(reseglabels)
tup=(reseglabels,[],corlortable,{},currentfilename)
print(elesize)
mapdict,mapimage,smallset=showcounting(tup,True,True,True)
tempimgbands={}
tempimgdict={}
tempsmall={}
tempimgbands.update({'iter0':mapimage})
tempimgdict.update({'iter0':mapdict})
tempsmall.update({'iter0':smallset})
outputimgdict.update({currentfilename:tempimgdict})
outputimgbands.update({currentfilename:tempimgbands})
outputsegbands.update({currentfilename:tempsmall})
changeoutputimg(currentfilename,'1')
def Open_Multifile():
global extractbutton,outputbutton
if proc_mode[proc_name].get()=='1':
batchprocess.Open_batchfolder()
extractbutton.config(state=NORMAL)
outputbutton.config(state=NORMAL)
return
# else:
# extractbutton.config(state=DISABLED)
global Multiimage,Multigray,Multitype,Multiimagebands,changefileframe,imageframe,Multigraybands,filenames
global changefiledrop,filedropvar,originbandarray,displaybandarray,clusterdisplay,currentfilename,resviewframe
global refsubframe,reseglabels,refbutton,figcanvas,loccanvas,originlabels,changekmeans,refarea
global originlabeldict,convband,panelA
global havecolorstrip
global colordicesband,oldpcachoice
global pccombinebar_up
global displaylabels,displaypclabels
global buttonvar
global colorindicearray
global selarea
MULTIFILES=filedialog.askopenfilenames()
root.update()
if len(MULTIFILES)>0:
Multiimage={}
Multigray={}
Multitype={}
Multiimagebands={}
Multigraybands={}
filenames=[]
originbandarray={}
colorindicearray={}
displaybandarray={}
clusterdisplay={}
oldpcachoice=[]
reseglabels=None
originlabels=None
originlabeldict=None
#changekmeans=True
convband=None
refvar.set('0')
kmeans.set('2')
panelA.delete(ALL)
panelA.unbind('<Button-1>')
panelA.unbind('<Shift-Button-1>')
refarea=None
havecolorstrip=False
displaypclabels=None
buttonvar.set(0)
# if 'NDI' in bandchoice:
# bandchoice['NDI'].set('1')
# if 'NDVI' in bandchoice:
# bandchoice['NDVI'].set('1')
refbutton.config(state=DISABLED)
# selareabutton.configure(state=DISABLED)
selarea.set('0')
figcanvas.delete(ALL)
#loccanvas=None
for widget in refsubframe.winfo_children():
widget.config(state=DISABLED)
#for widget in resviewframe.winfo_children():
# widget.config(state=DISABLED)
if outputbutton is not None:
outputbutton.config(state=DISABLED)
for i in range(len(MULTIFILES)):
if Open_File(MULTIFILES[i])==False:
return
generatedisplayimg(filenames[0])
changedisplayimg(imageframe,'Origin')
# imageframe.update()
# raise NameError
# yield
# thread=threading.Thread(target=singleband,args=(MULTIFILES[i],))
singleband(MULTIFILES[i])
# thread.start()
# thread.join()
for widget in changefileframe.winfo_children():
widget.pack_forget()
currentfilename=filenames[0]
# filedropvar.set(filenames[0])
# changefiledrop=OptionMenu(changefileframe,filedropvar,*filenames,command=partial(changeimage,imageframe))
# changefiledrop.pack()
#singleband(filenames[0])
generatedisplayimg(filenames[0])
# changedisplayimg(imageframe,'Origin')
getPCs()
if len(bandchoice)>0:
for i in range(len(cluster)):
bandchoice[cluster[i]].set('')
#changedisplayimg(imageframe,'Origin')
kmeans.set(1)
#reshapemodified_tif=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0]*displaybandarray[currentfilename]['LabOstu'].shape[1],3))
#colordicesband=kmeansclassify(['LabOstu'],reshapemodified_tif)
displaylabels=kmeansclassify()
generateimgplant('')
changedisplayimg(imageframe,'Origin')
# if len(bandchoice)>0:
# bandchoice['LabOstu'].set('1')
global buttondisplay,pcaframe,kmeansbar
for widget in buttondisplay.winfo_children():
widget.config(state=NORMAL)
# for widget in pcaframe.winfo_children():
# for widget in pcselframe.winfo_children():
# widget.config(state=NORMAL)
extractbutton.config(state=NORMAL)
kmeansbar.state(["!disabled"])
pccombinebar_up.state(["!disabled"])
def fillpartialbands(vector,vectorindex,band,filter_vector):
nonzero=np.where(filter_vector!=0)
vector[nonzero,vectorindex]=vector[nonzero,vectorindex]+band
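# fillpartialbands() above scatters the ROI-masked band values back into the
# full-length feature vector: only positions where the filter is nonzero are
# written, the rest stay zero.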
def fillbands(originbands,displaybands,vector,vectorindex,name,band,filter=0):
tempdict={name:band}
if isinstance(filter,int):
if name not in originbands:
originbands.update(tempdict)
image=cv2.resize(band,(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)
displaydict={name:image}
displaybands.update(displaydict)
fea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
vector[:,vectorindex]=vector[:,vectorindex]+fea_bands
else:
if name not in originbands:
originbands.update(tempdict)
image=cv2.resize(band,(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)
image=np.multiply(image,filter)
displaydict={name:image}
displaybands.update(displaydict)
fea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
vector[:,vectorindex]=vector[:,vectorindex]+fea_bands
return
def plot3d(pcas):
from mpl_toolkits.mplot3d import Axes3D # noqa: F401 unused import
import matplotlib.pyplot as plt
fig=plt.figure()
ax=fig.add_subplot(111,projection='3d')
x=pcas[:,0]
y=pcas[:,1]
z=pcas[:,2]*0+np.min(pcas[:,2])
ax.scatter(x,y,z,color='tab:purple')
x=pcas[:,0]*0+np.min(pcas[:,0])
y=pcas[:,1]
z=pcas[:,2]
ax.scatter(x,y,z,color='tab:pink')
x=pcas[:,0]
y=pcas[:,1]*0+np.max(pcas[:,1])
z=pcas[:,2]
ax.scatter(x,y,z,color='tab:olive')
ax.set_xlabel('Color Indices PC1')
ax.set_ylabel('Color Indices PC2')
ax.set_zlabel('Color Indices PC3')
# plt.show()
plt.savefig('3dplot_PC.png')
def partialoneband(filter):
global displaybandarray,originpcabands
global pcbuttons
global nonzero_vector,partialpca
partialpca=True
bands=Multiimagebands[currentfilename].bands
channel,fea_l,fea_w=bands.shape
nonzero=np.where(filter!=0)
RGB_vector=np.zeros((displayfea_l*displayfea_w,3))
colorindex_vector=np.zeros((displayfea_l*displayfea_w,12))
filter_vector=filter.reshape((displayfea_l*displayfea_w),1)[:,0]
originbands={}
displays={}
Red=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
Green=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
# Red=cv2.adaptiveThreshold(Red,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,11,2)
# Green=cv2.adaptiveThreshold(Green,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,2)
Blue=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
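    # single-band input: the same band (band 0) is reused for all three RGB slots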
# Blue=cv2.threshold(Blue,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
fillpartialbands(RGB_vector,0,Red,filter_vector)
fillpartialbands(RGB_vector,1,Green,filter_vector)
fillpartialbands(RGB_vector,2,Blue,filter_vector)
PAT_R=Red
PAT_G=Red
PAT_B=Red
ROO_R=Red
ROO_G=Red
ROO_B=Red
DIF_R=Red
DIF_G=Red
DIF_B=Red
GLD_R=Red
GLD_G=Red
GLD_B=Red
fillpartialbands(colorindex_vector,0,PAT_R,filter_vector)
fillpartialbands(colorindex_vector,1,PAT_G,filter_vector)
fillpartialbands(colorindex_vector,2,PAT_B,filter_vector)
fillpartialbands(colorindex_vector,3,ROO_R,filter_vector)
fillpartialbands(colorindex_vector,4,ROO_G,filter_vector)
fillpartialbands(colorindex_vector,5,ROO_B,filter_vector)
fillpartialbands(colorindex_vector,6,DIF_R,filter_vector)
fillpartialbands(colorindex_vector,7,DIF_G,filter_vector)
fillpartialbands(colorindex_vector,8,DIF_B,filter_vector)
fillpartialbands(colorindex_vector,9,GLD_R,filter_vector)
fillpartialbands(colorindex_vector,10,GLD_G,filter_vector)
fillpartialbands(colorindex_vector,11,GLD_B,filter_vector)
nonzero_vector=np.where(filter_vector!=0)
displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
featurechannel=14
# np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%10.5f')
# displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
originpcabands.update({currentfilename:displayfea_vector})
pcabandsdisplay=displayfea_vector[:,:14]
pcabandsdisplay=pcabandsdisplay.reshape(displayfea_l,displayfea_w,featurechannel)
tempdictdisplay={'LabOstu':pcabandsdisplay}
displaybandarray.update({currentfilename:tempdictdisplay})
# originbandarray.update({currentfilename:originbands})
# Red=displays['Band1']
# Green=displays['Band2']
# Blue=displays['Band3']
# convimg=np.zeros((Red.shape[0],Red.shape[1],3))
# convimg[:,:,0]=Red
# convimg[:,:,1]=Green
# convimg[:,:,2]=Blue
# convimg=Image.fromarray(convimg.astype('uint8'))
# convimg.save('convimg.png','PNG')
pcbuttons=[]
need_w=int(450/3)
need_h=int(400/4)
for i in range(2,3):
band=np.copy(pcabandsdisplay[:,:,i])
# imgband=(band-band.min())*255/(band.max()-band.min())
imgband=np.copy(band)
pcimg=Image.fromarray(imgband.astype('uint8'),'L')
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
        pcimg.thumbnail((need_w,need_h),Image.LANCZOS)  # ANTIALIAS alias was removed in Pillow 10
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
# ratio=max(displayfea_l/need_h,displayfea_w/need_w)
# print('origin band range',band.max(),band.min())
# # band,cache=tkintercorestat.pool_forward(band,{"f":int(ratio),"stride":int(ratio)})
# band=cv2.resize(band,(need_w,need_h),interpolation=cv2.INTER_LINEAR)
# bandrange=band.max()-band.min()
# print('band range',band.max(),band.min())
# band=(band-band.min())/bandrange*255
# print('button img range',band.max(),band.min())
# buttonimg=Image.fromarray(band.astype('uint8'),'L')
pcbuttons.append(ImageTk.PhotoImage(pcimg))
def partialsingleband(filter):
global displaybandarray,originpcabands
global pcbuttons
global nonzero_vector,partialpca
partialpca=True
bands=Multiimagebands[currentfilename].bands
channel,fea_l,fea_w=bands.shape
nonzero=np.where(filter!=0)
RGB_vector=np.zeros((displayfea_l*displayfea_w,3))
colorindex_vector=np.zeros((displayfea_l*displayfea_w,12))
filter_vector=filter.reshape((displayfea_l*displayfea_w),1)[:,0]
originbands={}
displays={}
if channel==1:
# Red=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
# Green=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
# Blue=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
# fillpartialbands(RGB_vector,0,Red,filter_vector)
# fillpartialbands(RGB_vector,1,Green,filter_vector)
# fillpartialbands(RGB_vector,2,Blue,filter_vector)
partialoneband(filter)
return
else:
Red=cv2.resize(bands[0,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
Green=cv2.resize(bands[1,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
Blue=cv2.resize(bands[2,:,:],(displayfea_w,displayfea_l),interpolation=cv2.INTER_LINEAR)[nonzero]
fillpartialbands(RGB_vector,0,Red,filter_vector)
fillpartialbands(RGB_vector,1,Green,filter_vector)
fillpartialbands(RGB_vector,2,Blue,filter_vector)
PAT_R=Red/(Red+Green)
PAT_G=Green/(Green+Blue)
PAT_B=Blue/(Blue+Red)
ROO_R=Red/Green
ROO_G=Green/Blue
ROO_B=Blue/Red
DIF_R=2*Red-Green-Blue
DIF_G=2*Green-Blue-Red
DIF_B=2*Blue-Red-Green
GLD_R=Red/(np.multiply(np.power(Blue,0.618),np.power(Green,0.382)))
GLD_G=Green/(np.multiply(np.power(Blue,0.618),np.power(Red,0.382)))
GLD_B=Blue/(np.multiply(np.power(Green,0.618),np.power(Red,0.382)))
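        # Four color-index families, three band rotations each: PAT_* are pairwise
        # ratios of the form R/(R+G), ROO_* plain band ratios, DIF_* excess-band
        # differences (2R-G-B style), and GLD_* golden-ratio-weighted ratios using
        # exponents 0.618/0.382. Together with the raw RGB bands they feed the PCA.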
fillpartialbands(colorindex_vector,0,PAT_R,filter_vector)
fillpartialbands(colorindex_vector,1,PAT_G,filter_vector)
fillpartialbands(colorindex_vector,2,PAT_B,filter_vector)
fillpartialbands(colorindex_vector,3,ROO_R,filter_vector)
fillpartialbands(colorindex_vector,4,ROO_G,filter_vector)
fillpartialbands(colorindex_vector,5,ROO_B,filter_vector)
fillpartialbands(colorindex_vector,6,DIF_R,filter_vector)
fillpartialbands(colorindex_vector,7,DIF_G,filter_vector)
fillpartialbands(colorindex_vector,8,DIF_B,filter_vector)
fillpartialbands(colorindex_vector,9,GLD_R,filter_vector)
fillpartialbands(colorindex_vector,10,GLD_G,filter_vector)
fillpartialbands(colorindex_vector,11,GLD_B,filter_vector)
for i in range(12):
perc=np.percentile(colorindex_vector[:,i],1)
print('perc',perc)
colorindex_vector[:,i]=np.where(colorindex_vector[:,i]<perc,perc,colorindex_vector[:,i])
perc=np.percentile(colorindex_vector[:,i],99)
print('perc',perc)
colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
for i in range(3):
perc=np.percentile(RGB_vector[:,i],1)
print('perc',perc)
RGB_vector[:,i]=np.where(RGB_vector[:,i]<perc,perc,RGB_vector[:,i])
perc=np.percentile(RGB_vector[:,i],99)
print('perc',perc)
RGB_vector[:,i]=np.where(RGB_vector[:,i]>perc,perc,RGB_vector[:,i])
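    # The two loops above winsorize every feature to its [1st, 99th] percentile
    # band so extreme ratio values cannot dominate the covariance structure.
    # Equivalent vectorized form (sketch, assuming a 2-D array X):
    #   lo = np.percentile(X, 1, axis=0)
    #   hi = np.percentile(X, 99, axis=0)
    #   X = np.clip(X, lo, hi)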
nonzero_vector=np.where(filter_vector!=0)
rgb_M=np.mean(RGB_vector[nonzero_vector,:].T,axis=1)
colorindex_M=np.mean(colorindex_vector[nonzero_vector,:].T,axis=1)
print('rgb_M',rgb_M,'colorindex_M',colorindex_M)
rgb_C=RGB_vector[nonzero_vector,:][0]-rgb_M.T
colorindex_C=colorindex_vector[nonzero_vector,:][0]-colorindex_M.T
rgb_V=np.corrcoef(rgb_C.T)
color_V=np.corrcoef(colorindex_C.T)
nans=np.isnan(color_V)
color_V[nans]=1e-6
rgb_std=rgb_C/(np.std(RGB_vector[nonzero_vector,:].T,axis=1)).T
color_std=colorindex_C/(np.std(colorindex_vector[nonzero_vector,:].T,axis=1)).T
nans=np.isnan(color_std)
color_std[nans]=1e-6
rgb_eigval,rgb_eigvec=np.linalg.eig(rgb_V)
color_eigval,color_eigvec=np.linalg.eig(color_V)
print('rgb_eigvec',rgb_eigvec)
print('color_eigvec',color_eigvec)
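    # The block below projects the standardized features onto the eigenvectors
    # of their correlation matrix, i.e. a correlation-based PCA. A minimal
    # standalone sketch of the same idea (assuming a 2-D data matrix X):
    #   Xs = (X - X.mean(axis=0)) / X.std(axis=0)
    #   eigval, eigvec = np.linalg.eig(np.corrcoef(Xs.T))
    #   scores = Xs @ eigvec   # one column per principal component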
featurechannel=12
pcabands=np.zeros((colorindex_vector.shape[0],featurechannel))
rgbbands=np.zeros((colorindex_vector.shape[0],3))
for i in range(0,9):
pcn=color_eigvec[:,i]
pcnbands=np.dot(color_std,pcn)
pcvar=np.var(pcnbands)
print('color index pc',i+1,'var=',pcvar)
pcabands[nonzero_vector,i]=pcabands[nonzero_vector,i]+pcnbands
for i in range(9,12):
pcn=rgb_eigvec[:,i-9]
pcnbands=np.dot(rgb_std,pcn)
pcvar=np.var(pcnbands)
print('rgb pc',i-9+1,'var=',pcvar)
pcabands[nonzero_vector,i]=pcabands[nonzero_vector,i]+pcnbands
rgbbands[nonzero_vector,i-9]=rgbbands[nonzero_vector,i-9]+pcnbands
# plot3d(pcabands)
# np.savetxt('rgb.csv',rgbbands,delimiter=',',fmt='%10.5f')
# pcabands[:,1]=np.copy(pcabands[:,1])
# pcabands[:,2]=pcabands[:,2]*0
# indexbands=np.zeros((colorindex_vector.shape[0],3))
# if i<5:
# indexbands[:,i-2]=indexbands[:,i-2]+pcnbands
for i in range(12):
perc=np.percentile(pcabands[:,i],1)
print('perc',perc)
pcabands[:,i]=np.where(pcabands[:,i]<perc,perc,pcabands[:,i])
perc=np.percentile(pcabands[:,i],99)
print('perc',perc)
pcabands[:,i]=np.where(pcabands[:,i]>perc,perc,pcabands[:,i])
    # save to csv (disabled; kept for reference)
# indexbands[:,0]=indexbands[:,0]+pcabands[:,2]
# indexbands[:,1]=indexbands[:,1]+pcabands[:,3]
# indexbands[:,2]=indexbands[:,2]+pcabands[:,4]
# plot3d(indexbands)
# np.savetxt('pcs.csv',pcabands,delimiter=',',fmt='%10.5f')
displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
# np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%10.5f')
# displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
originpcabands.update({currentfilename:displayfea_vector})
pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
tempdictdisplay={'LabOstu':pcabandsdisplay}
displaybandarray.update({currentfilename:tempdictdisplay})
# originbandarray.update({currentfilename:originbands})
# Red=displays['Band1']
# Green=displays['Band2']
# Blue=displays['Band3']
# convimg=np.zeros((Red.shape[0],Red.shape[1],3))
# convimg[:,:,0]=Red
# convimg[:,:,1]=Green
# convimg[:,:,2]=Blue
# convimg=Image.fromarray(convimg.astype('uint8'))
# convimg.save('convimg.png','PNG')
pcbuttons=[]
need_w=int(450/3)
need_h=int(400/4)
for i in range(12):
band=np.copy(pcabandsdisplay[:,:,i])
imgband=(band-band.min())*255/(band.max()-band.min())
pcimg=Image.fromarray(imgband.astype('uint8'),'L')
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
        pcimg.thumbnail((need_w,need_h),Image.LANCZOS)  # ANTIALIAS alias was removed in Pillow 10
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
# ratio=max(displayfea_l/need_h,displayfea_w/need_w)
# print('origin band range',band.max(),band.min())
# # band,cache=tkintercorestat.pool_forward(band,{"f":int(ratio),"stride":int(ratio)})
# band=cv2.resize(band,(need_w,need_h),interpolation=cv2.INTER_LINEAR)
# bandrange=band.max()-band.min()
# print('band range',band.max(),band.min())
# band=(band-band.min())/bandrange*255
# print('button img range',band.max(),band.min())
# buttonimg=Image.fromarray(band.astype('uint8'),'L')
pcbuttons.append(ImageTk.PhotoImage(pcimg))
def oneband(file):
global displaybandarray,originbandarray,originpcabands,displayfea_l,displayfea_w
global pcbuttons
global partialpca
partialpca=False
try:
bands=Multiimagebands[file].bands
except:
return
pcbuttons=[]
channel,fea_l,fea_w=bands.shape
print('bandsize',fea_l,fea_w)
if fea_l*fea_w>2000*2000:
ratio=findratio([fea_l,fea_w],[2000,2000])
else:
ratio=1
print('ratio',ratio)
originbands={}
displays={}
displaybands=cv2.resize(bands[0,:,:],(int(fea_w/ratio),int(fea_l/ratio)),interpolation=cv2.INTER_LINEAR)
displayfea_l,displayfea_w=displaybands.shape
RGB_vector=np.zeros((displayfea_l*displayfea_w,3))
colorindex_vector=np.zeros((displayfea_l*displayfea_w,12))
Red=bands[0,:,:].astype('uint8')
# _,Red=cv2.threshold(Red,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
Green=bands[0,:,:].astype('uint8')
# _,Green=cv2.threshold(Green,0,255,cv2.THRESH_OTSU)
Blue=bands[0,:,:].astype('uint8')
# _,Blue=cv2.threshold(Blue,0,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)
fillbands(originbands,displays,RGB_vector,0,'Band1',Red)
fillbands(originbands,displays,RGB_vector,1,'Band2',Green)
fillbands(originbands,displays,RGB_vector,2,'Band3',Blue)
PAT_R=bands[0,:,:].astype('uint8')
# PAT_R=cv2.adaptiveThreshold(PAT_R,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,2)
PAT_G=bands[0,:,:]
# PAT_G=cv2.adaptiveThreshold(PAT_G,255,cv2.ADAPTIVE_THRESH_MEAN_C,cv2.THRESH_BINARY,11,2)
PAT_B=bands[0,:,:]
ROO_R=bands[0,:,:]
ROO_G=bands[0,:,:]
ROO_B=bands[0,:,:]
DIF_R=bands[0,:,:]
DIF_G=bands[0,:,:]
DIF_B=bands[0,:,:]
GLD_R=bands[0,:,:]
GLD_G=bands[0,:,:]
GLD_B=bands[0,:,:]
fillbands(originbands,displays,colorindex_vector,0,'PAT_R',PAT_R)
fillbands(originbands,displays,colorindex_vector,1,'PAT_G',PAT_G)
fillbands(originbands,displays,colorindex_vector,2,'PAT_B',PAT_B)
fillbands(originbands,displays,colorindex_vector,3,'ROO_R',ROO_R)
fillbands(originbands,displays,colorindex_vector,4,'ROO_G',ROO_G)
fillbands(originbands,displays,colorindex_vector,5,'ROO_B',ROO_B)
fillbands(originbands,displays,colorindex_vector,6,'DIF_R',DIF_R)
fillbands(originbands,displays,colorindex_vector,7,'DIF_G',DIF_G)
fillbands(originbands,displays,colorindex_vector,8,'DIF_B',DIF_B)
fillbands(originbands,displays,colorindex_vector,9,'GLD_R',GLD_R)
fillbands(originbands,displays,colorindex_vector,10,'GLD_G',GLD_G)
fillbands(originbands,displays,colorindex_vector,11,'GLD_B',GLD_B)
displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
# np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%10.5f')
featurechannel=14
originpcabands.update({file:displayfea_vector})
# pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
# pcabandsdisplay=np.concatenate((RGB_vector,colorindex_vector),axis=2)
pcabandsdisplay=displayfea_vector[:,:14]
pcabandsdisplay=pcabandsdisplay.reshape(displayfea_l,displayfea_w,featurechannel)
tempdictdisplay={'LabOstu':pcabandsdisplay}
displaybandarray.update({file:tempdictdisplay})
originbandarray.update({file:originbands})
# Red=displays['Band1']
# Green=displays['Band2']
# Blue=displays['Band3']
# convimg=np.zeros((Red.shape[0],Red.shape[1],3))
# convimg[:,:,0]=Red
# convimg[:,:,1]=Green
# convimg[:,:,2]=Blue
# convimg=Image.fromarray(convimg.astype('uint8'))
# convimg.save('convimg.png','PNG')
need_w=int(450/3)
need_h=int(400/4)
for i in range(2,3):
band=np.copy(pcabandsdisplay[:,:,i])
# band=np.copy(Red)
# imgband=(band-band.min())*255/(band.max()-band.min())
imgband=np.copy(band)
pcimg=Image.fromarray(imgband.astype('uint8'),'L')
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
        pcimg.thumbnail((need_w,need_h),Image.LANCZOS)  # ANTIALIAS alias was removed in Pillow 10
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
# ratio=max(displayfea_l/need_h,displayfea_w/need_w)
# print('origin band range',band.max(),band.min())
# # band,cache=tkintercorestat.pool_forward(band,{"f":int(ratio),"stride":int(ratio)})
# band=cv2.resize(band,(need_w,need_h),interpolation=cv2.INTER_LINEAR)
# bandrange=band.max()-band.min()
# print('band range',band.max(),band.min())
# band=(band-band.min())/bandrange*255
# print('button img range',band.max(),band.min())
# buttonimg=Image.fromarray(band.astype('uint8'),'L')
pcbuttons.append(ImageTk.PhotoImage(pcimg))
def singleband(file):
global displaybandarray,originbandarray,originpcabands,displayfea_l,displayfea_w
global pcbuttons
global partialpca
partialpca=False
try:
bands=Multiimagebands[file].bands
except:
return
pcbuttons=[]
channel,fea_l,fea_w=bands.shape
print('bandsize',fea_l,fea_w)
if fea_l*fea_w>2000*2000:
ratio=findratio([fea_l,fea_w],[2000,2000])
else:
ratio=1
print('ratio',ratio)
originbands={}
displays={}
displaybands=cv2.resize(bands[0,:,:],(int(fea_w/ratio),int(fea_l/ratio)),interpolation=cv2.INTER_LINEAR)
# displaybands=np.copy(bands[0,:,:])
displayfea_l,displayfea_w=displaybands.shape
# displayfea_l,displayfea_w=fea_l,fea_w
print(displayfea_l,displayfea_w)
RGB_vector=np.zeros((displayfea_l*displayfea_w,3))
colorindex_vector=np.zeros((displayfea_l*displayfea_w,12))
if channel==1:
# Red=bands[0,:,:]
# Green=bands[0,:,:]
# Blue=bands[0,:,:]
oneband(file)
return
else:
Red=bands[0,:,:]
Green=bands[1,:,:]
Blue=bands[2,:,:]
fillbands(originbands,displays,RGB_vector,0,'Band1',Red)
fillbands(originbands,displays,RGB_vector,1,'Band2',Green)
fillbands(originbands,displays,RGB_vector,2,'Band3',Blue)
# import matplotlib.pyplot as plt
# fig,axs=plt.subplots(1,3)
# for i in range(3):
# minpc2=np.min(RGB_vector[:,i])
# maxpc2=np.max(RGB_vector[:,i])
# print(minpc2,maxpc2)
# bins=range(int(minpc2),int(maxpc2),10)
# axs[i].hist(RGB_vector[:,i],bins,range=(minpc2,maxpc2))
# axs[i].set_title('RGBband_'+str(i+1))
# # plt.hist(pcabands[:,13],bins,range=(minpc2,maxpc2))
# plt.show()
# secondsmallest_R=np.partition(Red,1)[1][0]
# secondsmallest_G=np.partition(Green,1)[1][0]
# secondsmallest_B=np.partition(Blue,1)[1][0]
#
# Red=Red+secondsmallest_R
# Green=Green+secondsmallest_G
# Blue=Blue+secondsmallest_B
# Red=Red/255+1
# Green=Green/255+1
# Blue=Blue/255+1
PAT_R=Red/(Red+Green)
PAT_G=Green/(Green+Blue)
PAT_B=Blue/(Blue+Red)
ROO_R=Red/(Green+1e-6)
ROO_G=Green/(Blue+1e-6)
ROO_B=Blue/(Red+1e-6)
DIF_R=2*Red-Green-Blue
DIF_G=2*Green-Blue-Red
DIF_B=2*Blue-Red-Green
GLD_R=Red/(np.multiply(np.power(Blue,0.618),np.power(Green,0.382))+1e-6)
GLD_G=Green/(np.multiply(np.power(Blue,0.618),np.power(Red,0.382))+1e-6)
GLD_B=Blue/(np.multiply(np.power(Green,0.618),np.power(Red,0.382))+1e-6)
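    # Same four index families as in partialsingleband(), but here the ROO_*
    # and GLD_* denominators carry a +1e-6 guard against zero division.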
fillbands(originbands,displays,colorindex_vector,0,'PAT_R',PAT_R)
fillbands(originbands,displays,colorindex_vector,1,'PAT_G',PAT_G)
fillbands(originbands,displays,colorindex_vector,2,'PAT_B',PAT_B)
fillbands(originbands,displays,colorindex_vector,3,'ROO_R',ROO_R)
fillbands(originbands,displays,colorindex_vector,4,'ROO_G',ROO_G)
fillbands(originbands,displays,colorindex_vector,5,'ROO_B',ROO_B)
fillbands(originbands,displays,colorindex_vector,6,'DIF_R',DIF_R)
fillbands(originbands,displays,colorindex_vector,7,'DIF_G',DIF_G)
fillbands(originbands,displays,colorindex_vector,8,'DIF_B',DIF_B)
fillbands(originbands,displays,colorindex_vector,9,'GLD_R',GLD_R)
fillbands(originbands,displays,colorindex_vector,10,'GLD_G',GLD_G)
fillbands(originbands,displays,colorindex_vector,11,'GLD_B',GLD_B)
# for i in [5,11]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],99)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
#
# for i in [0,1,3,4,9,10]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],90)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
# for i in [5,11]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],99)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
#
# for i in [3,4,9,10]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],1)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]<perc,perc,colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],99)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
#
# for i in [0,1]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
# perc=np.percentile(colorindex_vector[:,i],2)
# print('perc',perc)
# colorindex_vector[:,i]=np.where(colorindex_vector[:,i]<perc,perc,colorindex_vector[:,i])
# for i in [0,1,3,4,9,10]:
# colorindex_vector[:,i]=np.log10(colorindex_vector[:,i])
for i in range(12):
perc=np.percentile(colorindex_vector[:,i],1)
print('perc',perc)
colorindex_vector[:,i]=np.where(colorindex_vector[:,i]<perc,perc,colorindex_vector[:,i])
perc=np.percentile(colorindex_vector[:,i],99)
print('perc',perc)
colorindex_vector[:,i]=np.where(colorindex_vector[:,i]>perc,perc,colorindex_vector[:,i])
for i in range(3):
perc=np.percentile(RGB_vector[:,i],1)
print('perc',perc)
RGB_vector[:,i]=np.where(RGB_vector[:,i]<perc,perc,RGB_vector[:,i])
perc=np.percentile(RGB_vector[:,i],99)
print('perc',perc)
RGB_vector[:,i]=np.where(RGB_vector[:,i]>perc,perc,RGB_vector[:,i])
# import matplotlib.pyplot as plt
# fig,axs=plt.subplots(4,3)
# for i in range(12):
# minpc2=np.min(colorindex_vector[:,i])
# maxpc2=np.max(colorindex_vector[:,i])
# print(minpc2,maxpc2)
# # bins=range(int(minpc2),int(maxpc2)+1,10)
# axs[int(i/3),i%3].hist(colorindex_vector[:,i],10,range=(minpc2,maxpc2))
# axs[int(i/3),i%3].set_title('Colorindex_'+str(i+1))
# # axs[i].hist(colorindex_vector[:,i],10,range=(minpc2,maxpc2))
# # axs[i].set_title('Colorindex_'+str(i+1))
# # plt.hist(pcabands[:,13],bins,range=(minpc2,maxpc2))
# plt.show()
rgb_M=np.mean(RGB_vector.T,axis=1)
colorindex_M=np.mean(colorindex_vector.T,axis=1)
print('rgb_M',rgb_M,'colorindex_M',colorindex_M)
rgb_C=RGB_vector-rgb_M
colorindex_C=colorindex_vector-colorindex_M
rgb_V=np.corrcoef(rgb_C.T)
color_V=np.corrcoef(colorindex_C.T)
nans=np.isnan(color_V)
color_V[nans]=1e-6
rgb_std=rgb_C/np.std(RGB_vector.T,axis=1)
color_std=colorindex_C/np.std(colorindex_vector.T,axis=1)
nans=np.isnan(color_std)
color_std[nans]=1e-6
rgb_eigval,rgb_eigvec=np.linalg.eig(rgb_V)
color_eigval,color_eigvec=np.linalg.eig(color_V)
print('rgb_eigvec',rgb_eigvec)
print('color_eigvec',color_eigvec)
featurechannel=12
pcabands=np.zeros((colorindex_vector.shape[0],featurechannel))
rgbbands=np.zeros((colorindex_vector.shape[0],3))
# plot3d(pcabands)
# np.savetxt('rgb.csv',rgbbands,delimiter=',',fmt='%10.5f')
# pcabands[:,1]=np.copy(pcabands[:,1])
# pcabands[:,2]=pcabands[:,2]*0
indexbands=np.zeros((colorindex_vector.shape[0],3))
# for i in range(3,featurechannel):
# csvpcabands=np.zeros((colorindex_vector.shape[0],15))
for i in range(0,9):
pcn=color_eigvec[:,i]
pcnbands=np.dot(color_std,pcn)
pcvar=np.var(pcnbands)
print('color index pc',i+1,'var=',pcvar)
pcabands[:,i]=pcabands[:,i]+pcnbands
# if i<5:
# indexbands[:,i-2]=indexbands[:,i-2]+pcnbands
for i in range(9,12):
pcn=rgb_eigvec[:,i-9]
pcnbands=np.dot(rgb_std,pcn)
pcvar=np.var(pcnbands)
        print('rgb pc',i-9+1,'var=',pcvar)  # i runs 9..11; report RGB PCs as 1..3
pcabands[:,i]=pcabands[:,i]+pcnbands
rgbbands[:,i-9]=rgbbands[:,i-9]+pcnbands
# for i in range(0,12):
# pcn=color_eigvec[:,i]
# pcnbands=np.dot(color_std,pcn)
# pcvar=np.var(pcnbands)
# print('csv color index pc',i+1,'var=',pcvar)
# csvpcabands[:,i]=csvpcabands[:,i]+pcnbands
# for i in range(12,15):
# pcn=rgb_eigvec[:,i-12]
# pcnbands=np.dot(rgb_std,pcn)
# csvpcabands[:,i]=csvpcabands[:,i]+pcnbands
#
    # save to csv (disabled; kept for reference)
# indexbands[:,0]=indexbands[:,0]+pcabands[:,2]
# indexbands[:,1]=indexbands[:,1]+pcabands[:,3]
# indexbands[:,2]=indexbands[:,2]+pcabands[:,4]
# plot3d(indexbands)
# np.savetxt('pcs.csv',pcabands,delimiter=',',fmt='%10.5f')
# minpc=np.min(pcabands)
#
# meanpc=np.mean(pcabands)
# stdpc=np.std(pcabands)
# print('meanpc',meanpc,'stdpc',stdpc)
# pcabands=pcabands-meanpc/stdpc
# import matplotlib.pyplot as plt
# minpc2=np.min(pcabands[:,13])
# maxpc2=np.max(pcabands[:,13])
# print(minpc2,maxpc2)
# bins=range(int(minpc2),int(maxpc2),10)
# plt.hist(pcabands[:,13],bins,range=(minpc2,maxpc2))
# plt.show()
# np.savetxt('pcs.csv',pcabands[:,3],delimiter=',',fmt='%10.5f')
for i in range(12):
perc=np.percentile(pcabands[:,i],1)
print('perc',perc)
pcabands[:,i]=np.where(pcabands[:,i]<perc,perc,pcabands[:,i])
perc=np.percentile(pcabands[:,i],99)
print('perc',perc)
pcabands[:,i]=np.where(pcabands[:,i]>perc,perc,pcabands[:,i])
# import matplotlib.pyplot as plt
# fig,axs=plt.subplots(4,3)
# for i in range(2,14):
# minpc2=np.min(pcabands[:,i])
# maxpc2=np.max(pcabands[:,i])
# print(minpc2,maxpc2)
# # bins=range(int(minpc2),int(maxpc2)+1,10)
# axs[int((i-2)/3),(i-2)%3].hist(pcabands[:,i],10,range=(minpc2,maxpc2))
# axs[int((i-2)/3),(i-2)%3].set_title('PC_'+str(i-2+1))
# # axs[i].hist(colorindex_vector[:,i],10,range=(minpc2,maxpc2))
# # axs[i].set_title('Colorindex_'+str(i+1))
# # plt.hist(pcabands[:,13],bins,range=(minpc2,maxpc2))
# plt.show()
# header=['R','G','B',
# 'PAT_R','PAT_G','PAT_B',
# 'DIF_R','DIF_G','DIF_B',
# 'ROO_R','ROO_G','ROO_B',
# 'GLD_R','GLD_G','GLD_B',]
# displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
# with open('color-index.csv','w') as f:
# writer=csv.writer(f)
# writer.writerow(header)
# for i in range(displayfea_vector.shape[0]):
# writer.writerow(list(displayfea_vector[i,:]))
# np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%10.5f')
displayfea_vector=np.concatenate((RGB_vector,colorindex_vector),axis=1)
originpcabands.update({file:displayfea_vector})
pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
tempdictdisplay={'LabOstu':pcabandsdisplay}
displaybandarray.update({file:tempdictdisplay})
originbandarray.update({file:originbands})
# Red=displays['Band1']
# Green=displays['Band2']
# Blue=displays['Band3']
# convimg=np.zeros((Red.shape[0],Red.shape[1],3))
# convimg[:,:,0]=Red
# convimg[:,:,1]=Green
# convimg[:,:,2]=Blue
# convimg=Image.fromarray(convimg.astype('uint8'))
# convimg.save('convimg.png','PNG')
need_w=int(450/3)
need_h=int(400/4)
# pcdisplay=[3,4,5,6,7,8,9,10,11,0,1,2]
# for i in range(2,featurechannel):
for i in range(featurechannel):
band=np.copy(pcabandsdisplay[:,:,i])
imgband=(band-band.min())*255/(band.max()-band.min())
pcimg=Image.fromarray(imgband.astype('uint8'),'L')
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
        pcimg.thumbnail((need_w,need_h),Image.LANCZOS)  # ANTIALIAS alias was removed in Pillow 10
# pcimg.save('pc'+'_'+str(i)+'.png',"PNG")
# ratio=max(displayfea_l/need_h,displayfea_w/need_w)
# print('origin band range',band.max(),band.min())
# # band,cache=tkintercorestat.pool_forward(band,{"f":int(ratio),"stride":int(ratio)})
# band=cv2.resize(band,(need_w,need_h),interpolation=cv2.INTER_LINEAR)
# bandrange=band.max()-band.min()
# print('band range',band.max(),band.min())
# band=(band-band.min())/bandrange*255
# print('button img range',band.max(),band.min())
# buttonimg=Image.fromarray(band.astype('uint8'),'L')
pcbuttons.append(ImageTk.PhotoImage(pcimg))
def colorindices_cal(file):
global colorindicearray
try:
bands=Multiimagebands[file].bands
except:
return
channel,fea_l,fea_w=bands.shape
print('bandsize',fea_l,fea_w)
if fea_l*fea_w>2000*2000:
ratio=findratio([fea_l,fea_w],[2000,2000])
else:
ratio=1
print('ratio',ratio)
originbands={}
displays={}
# displaybands=cv2.resize(bands[0,:,:],(int(fea_w/ratio),int(fea_l/ratio)),interpolation=cv2.INTER_LINEAR)
# displaybands=np.copy(bands[0,:,:])
# displayfea_l,displayfea_w=displaybands.shape
# displayfea_l,displayfea_w=fea_l,fea_w
print(displayfea_l,displayfea_w)
colorindex_vector=np.zeros((displayfea_l*displayfea_w,7))
if channel==1:
Red=bands[0,:,:]
Green=bands[0,:,:]
Blue=bands[0,:,:]
else:
Red=bands[0,:,:]
Green=bands[1,:,:]
Blue=bands[2,:,:]
secondsmallest_R=np.partition(Red,1)[1][0]
secondsmallest_G=np.partition(Green,1)[1][0]
secondsmallest_B=np.partition(Blue,1)[1][0]
Red=Red+secondsmallest_R
Green=Green+secondsmallest_G
Blue=Blue+secondsmallest_B
NDI=128*((Green-Red)/(Green+Red)+1)
VEG=Green/(np.power(Red,0.667)*np.power(Blue,(1-0.667)))
Greenness=Green/(Green+Red+Blue)
CIVE=0.44*Red+0.811*Green+0.385*Blue+18.7845
MExG=1.262*Green-0.844*Red-0.311*Blue
NDRB=(Red-Blue)/(Red+Blue)
NGRDI=(Green-Red)/(Green+Red)
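    # Classic RGB vegetation indices: NDI and NGRDI contrast green against red,
    # Greenness is the green chromatic coordinate, VEG, CIVE and MExG are
    # published color-index combinations for vegetation segmentation, and NDRB
    # contrasts red against blue.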
fillbands(originbands,displays,colorindex_vector,0,'NDI',NDI)
fillbands(originbands,displays,colorindex_vector,1,'VEG',VEG)
fillbands(originbands,displays,colorindex_vector,2,'Greenness',Greenness)
fillbands(originbands,displays,colorindex_vector,3,'CIVE',CIVE)
fillbands(originbands,displays,colorindex_vector,4,'MExG',MExG)
fillbands(originbands,displays,colorindex_vector,5,'NDRB',NDRB)
fillbands(originbands,displays,colorindex_vector,6,'NGRDI',NGRDI)
colorindicearray.update({file:originbands})
def singleband_oldversion(file):
global displaybandarray,originbandarray,originpcabands,displayfea_l,displayfea_w
global pcbuttons
try:
bands=Multigraybands[file].bands
except:
return
pcbuttons=[]
bandsize=Multigraybands[file].size
print('bandsize',bandsize)
try:
channel,height,width=bands.shape
except:
channel=0
if channel>1:
bands=bands[0,:,:]
#bands=cv2.GaussianBlur(bands,(3,3),cv2.BORDER_DEFAULT)
ostu=filters.threshold_otsu(bands)
bands=bands.astype('float32')
bands=bands/ostu
#display purpose
if bandsize[0]*bandsize[1]>2000*2000:
ratio=findratio([bandsize[0],bandsize[1]],[2000,2000])
else:
ratio=1
print('ratio',ratio)
#if bandsize[0]*bandsize[1]>850*850:
# ratio=findratio([bandsize[0],bandsize[1]],[850,850])
#else:
# ratio=1
#ttestbands=np.copy(bands)
#testdisplaybands=cv2.resize(ttestbands,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#testdisplaybands=cv2.resize(testdisplaybands,(int(resizeshape[0]),int(resizeshape[1])),interpolation=cv2.INTER_LINEAR)
#print('testdisplaybands size',testdisplaybands.size)
#if bandsize[0]*bandsize[1]>850*850:
# ratio=findratio([bandsize[0],bandsize[1]],[850,850])
#else:
# ratio=1
originbands={}
displays={}
fea_l,fea_w=bands.shape
# fea_vector=np.zeros((fea_l*fea_w,3))
pyplt.imsave('bands.png',bands)
displaybands=cv2.resize(bands,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
pyplt.imsave('displaybands.png',displaybands)
displayfea_l,displayfea_w=displaybands.shape
fea_vector=np.zeros((displayfea_l*displayfea_w,3))
displayfea_vector=np.zeros((displayfea_l*displayfea_w,7))
colorfea_vector=np.zeros((displayfea_l*displayfea_w,7))
# originfea_vector=np.zeros((bandsize[0],bandsize[1],10))
# saveimg=np.copy(bands).astype('uint8')
# pyplt.imsave('ostuimg.png',saveimg)
if 'LabOstu' not in originbands:
originbands.update({'LabOstu':bands})
fea_bands=bands.reshape(fea_l*fea_w,1)[:,0]
# originfea_vector[:,9]=originfea_vector[:,0]+fea_bands
displayfea_bands=displaybands.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,9]=fea_vector[:,0]+fea_bands
displayfea_vector[:,6]=displayfea_vector[:,6]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,6]=colorfea_vector[:,6]+colorfeabands
#displaybands=displaybands.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
#kernel=np.ones((2,2),np.float32)/4
#displaybands=np.copy(bands)
displays.update({'LabOstu':displaybands})
#displaybandarray.update({'LabOstu':cv2.filter2D(displaybands,-1,kernel)})
bands=Multiimagebands[file].bands
#for i in range(3):
# bands[i,:,:]=cv2.GaussianBlur(bands[i,:,:],(3,3),cv2.BORDER_DEFAULT)
NDI=128*((bands[1,:,:]-bands[0,:,:])/(bands[1,:,:]+bands[0,:,:])+1)
tempdict={'NDI':NDI}
# saveimg=np.copy(NDI).astype('uint8')
# pyplt.imsave('NDIimg.png',saveimg)
if 'NDI' not in originbands:
originbands.update(tempdict)
displaybands=cv2.resize(NDI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
fea_bands=NDI.reshape(fea_l*fea_w,1)[:,0]
# originfea_vector[:,1]=originfea_vector[:,1]+fea_bands
displayfea_bands=displaybands.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,1]=fea_vector[:,1]+fea_bands
displayfea_vector[:,1]=displayfea_vector[:,1]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,1]=colorfea_vector[:,1]+colorfeabands
#displaybands=np.copy(NDI)
#kernel=np.ones((2,2),np.float32)/4
#displaydict={'NDI':cv2.filter2D(displaybands,-1,kernel)}
displaydict={'NDI':displaybands}
#displaydict=displaydict.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
displays.update(displaydict)
Red=bands[0,:,:]
Green=bands[1,:,:]
Blue=bands[2,:,:]
tempdict={'Band1':Red}
# saveimg=np.zeros((bandsize[0],bandsize[1],3),'uint8')
# saveimg[:,:,0]=np.copy(Red).astype('uint8')
# pyplt.imsave('Redimg.png',saveimg)
# saveimg=np.zeros((bandsize[0],bandsize[1],3),'uint8')
# saveimg[:,:,1]=np.copy(Green).astype('uint8')
# pyplt.imsave('Greenimg.png',saveimg)
# saveimg=np.zeros((bandsize[0],bandsize[1],3),'uint8')
# saveimg[:,:,2]=np.copy(Blue).astype('uint8')
# pyplt.imsave('Blueimg.png',saveimg)
if 'Band1' not in originbands:
originbands.update(tempdict)
image=cv2.resize(Red,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band1':image}
displays.update(displaydict)
# fea_bands=Red.reshape(fea_l*fea_w,1)[:,0]
fea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# originfea_vector[:,2]=originfea_vector[:,2]+fea_bands
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,0]=fea_vector[:,0]+fea_bands
# displayfea_vector[:,2]=displayfea_vector[:,2]+displayfea_bands
tempdict={'Band2':Green}
if 'Band2' not in originbands:
originbands.update(tempdict)
image=cv2.resize(Green,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band2':image}
displays.update(displaydict)
# fea_bands=Green.reshape(fea_l*fea_w,1)[:,0]
fea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# originfea_vector[:,3]=originfea_vector[:,3]+fea_bands
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,1]=fea_vector[:,1]+fea_bands
# displayfea_vector[:,3]=displayfea_vector[:,3]+displayfea_bands
tempdict={'Band3':Blue}
if 'Band3' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,4]=originfea_vector[:,4]+Blue
image=cv2.resize(Blue,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
displaydict={'Band3':image}
displays.update(displaydict)
# fea_bands=Blue.reshape(fea_l*fea_w,1)[:,0]
fea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
fea_vector[:,2]=fea_vector[:,2]+fea_bands
# displayfea_vector[:,4]=displayfea_vector[:,4]+displayfea_bands
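# Greenness: green chromatic coordinate G/(R+G+B).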
Greenness = bands[1, :, :] / (bands[0, :, :] + bands[1, :, :] + bands[2, :, :])
tempdict = {'Greenness': Greenness}
if 'Greenness' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,5]=originfea_vector[:,5]+Greenness
image=cv2.resize(Greenness,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
displaydict={'Greenness':image}
#displaybandarray.update(worktempdict)
displays.update(displaydict)
fea_bands=Greenness.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,5]=fea_vector[:,5]+fea_bands
displayfea_vector[:,2]=displayfea_vector[:,2]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,2]=colorfea_vector[:,2]+colorfeabands
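# VEG: vegetative index G/(R^a * B^(1-a)) with a=0.667, a formulation commonly
# attributed to Hague et al. for RGB vegetation segmentation.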
VEG=bands[1,:,:]/(np.power(bands[0,:,:],0.667)*np.power(bands[2,:,:],(1-0.667)))
tempdict={'VEG':VEG}
if 'VEG' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,6]=originfea_vector[:,6]+VEG
image=cv2.resize(VEG,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
kernel=np.ones((4,4),np.float32)/16
#displaybandarray.update({'LabOstu':})
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'VEG':cv2.filter2D(image,-1,kernel)}
displays.update(worktempdict)
fea_bands=VEG.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,6]=fea_vector[:,6]+fea_bands
displayfea_vector[:,3]=displayfea_vector[:,3]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,3]=colorfea_vector[:,3]+colorfeabands
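# CIVE: Color Index of Vegetation Extraction; the coefficients below match the
# widely cited formulation 0.441*R - 0.811*G + 0.385*B + 18.78745.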
CIVE=0.441*bands[0,:,:]-0.811*bands[1,:,:]+0.385*bands[2,:,:]+18.78745
tempdict={'CIVE':CIVE}
if 'CIVE' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,7]=originfea_vector[:,7]+CIVE
image=cv2.resize(CIVE,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'CIVE':image}
displays.update(worktempdict)
fea_bands=CIVE.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,7]=fea_vector[:,7]+fea_bands
displayfea_vector[:,4]=displayfea_vector[:,4]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,4]=colorfea_vector[:,4]+colorfeabands
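# MExG: modified excess-green index 1.262*G - 0.884*R - 0.311*B.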
MExG=1.262*bands[1,:,:]-0.884*bands[0,:,:]-0.311*bands[2,:,:]
tempdict={'MExG':MExG}
if 'MExG' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,8]=originfea_vector[:,8]+MExG
image=cv2.resize(MExG,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'MExG':image}
displays.update(worktempdict)
fea_bands=MExG.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,8]=fea_vector[:,8]+fea_bands
displayfea_vector[:,5]=displayfea_vector[:,5]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,5]=colorfea_vector[:,5]+colorfeabands
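# Pseudo-NDVI from RGB only: (R-B)/(R+B). Without a true NIR band this is a
# red/blue contrast rather than the classical (NIR-Red)/(NIR+Red); when a
# separate gray/NIR band exists, the channel>=1 branch below overwrites it
# with an (NIR-G)/(NIR+G) version.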
NDVI=(bands[0,:,:]-bands[2,:,:])/(bands[0,:,:]+bands[2,:,:])
tempdict={'NDVI':NDVI}
if 'NDVI' not in originbands:
originbands.update(tempdict)
# originfea_vector[:,0]=originfea_vector[:,9]+NDVI
image=cv2.resize(NDVI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NDVI':image}
displays.update(worktempdict)
fea_bands=NDVI.reshape(fea_l*fea_w,1)[:,0]
displayfea_bands=image.reshape((displayfea_l*displayfea_w),1)[:,0]
# fea_vector[:,0]=fea_vector[:,9]+fea_bands
displayfea_vector[:,0]=displayfea_vector[:,0]+displayfea_bands
minv=displayfea_bands.min()
maxv=displayfea_bands.max()
fearange=maxv-minv
colorfeabands=displayfea_bands-minv
colorfeabands=colorfeabands/fearange*255
colorfea_vector[:,0]=colorfea_vector[:,0]+colorfeabands
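# NGRDI: normalized green-red difference index (G-R)/(G+R).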
NGRDI=(bands[1,:,:]-bands[0,:,:])/(bands[1,:,:]+bands[0,:,:])
tempdict={'NGRDI':NGRDI}
if 'NGRDI' not in originbands:
originbands.update(tempdict)
image=cv2.resize(NGRDI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NGRDI':image}
displays.update(worktempdict)
if channel>=1:
nirbands=Multigraybands[file].bands
NDVI=(nirbands[0,:,:]-bands[1,:,:])/(nirbands[0,:,:]+bands[1,:,:])
tempdict={'NDVI':NDVI}
#if 'NDVI' not in originbandarray:
originbands.update(tempdict)
image=cv2.resize(NDVI,(int(bandsize[1]/ratio),int(bandsize[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#image=image.reshape((int(bandsize[1]/ratio),int(bandsize[0]/ratio),3))
worktempdict={'NDVI':image}
displays.update(worktempdict)
'''PCA part'''
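# PCA pipeline: concatenate the 3 raw RGB columns with the 7 color-index
# columns into a 10-column feature matrix, mean-center it (C), build the
# correlation matrix of C, then eigendecompose. Note that the projection loop
# below applies the correlation-matrix eigenvectors to the centered (not
# standardized) data C, so the printed per-PC variances reflect that choice.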
displayfea_vector=np.concatenate((fea_vector,displayfea_vector),axis=1)
M=np.mean(displayfea_vector.T,axis=1)
OM=np.mean(fea_vector.T,axis=1)
print('M',M,'M shape',M.shape, 'OM',OM,'OM Shape',OM.shape)
C=displayfea_vector-M
OC=fea_vector-OM
#max=np.max(C.T,axis=1)
#print('MAX',max)
#C=C/max
print('C',C,'OC',OC)
#V=np.cov(C.T)
V=np.corrcoef(C.T)
OV=np.corrcoef(OC.T)
std=np.std(displayfea_vector.T,axis=1)
O_std=np.std(fea_vector.T,axis=1)
print(std,O_std)
std_displayfea=C/std
O_stddisplayfea=OC/O_std
print(std_displayfea,O_stddisplayfea)
#eigvalues,eigvectors=np.linalg.eig(V)
#n,m=displayfea_vector.shape
#C=np.dot(displayfea_vector.T,displayfea_vector)/(n-1)
V_var=np.cov(std_displayfea.T)
print('COV',V_var)
print('COR',V)
eigvalues=la.eigvals(V_var)
#eigvalues=np.linalg.eigvals(C)
print('eigvalue',eigvalues)
idx=np.argsort(eigvalues)
print('idx',idx)
eigvalues,eigvectors=np.linalg.eig(V)
print('eigvalue',eigvalues)
print('eigvectors',eigvectors)
eigvalueperc={}
featurechannel=10
# for i in range(len(eigvalues)):
# print('percentage',i,eigvalues[i]/sum(eigvalues))
# eigvalueperc.update({i:eigvalues[i]/sum(eigvalues)})
# #if eigvalues[i]>0:
# featurechannel+=1
# o_eigenvalue,o_eigenvector=np.linalg.eig(OV)
pcabands=np.zeros((displayfea_vector.shape[0],featurechannel))
# o_pcabands=np.zeros((fea_vector.shape[0],featurechannel))
pcavar={}
# #
# # # separate PCs
# # for i in range(3):
# # pcn=o_eigenvector[:,i]
# # pcnbands=np.dot(O_stddisplayfea,pcn)
# # pcvar=np.var(pcnbands)
# # print('pc',i+1,' var=',pcvar)
# # pcabands[:,i]=pcabands[:,i]+pcnbands
# # for i in range(7):
# # pcn=eigvectors[:,i]
# # pcnbands=np.dot(std_displayfea,pcn)
# # pcvar=np.var(pcnbands)
# # print('pc',i+1,' var=',pcvar)
# # temppcavar={i:pcvar}
# # pcavar.update(temppcavar)
# # pcabands[:,i+3]=pcabands[:,i+3]+pcnbands
# #
# #
# combined PCs
for i in range(featurechannel):
pcn=eigvectors[:,i]
# pcnbands=np.dot(std_displayfea,pcn)
pcnbands=np.dot(C,pcn)
pcvar=np.var(pcnbands)
print('pc',i+1,' var=',pcvar)
temppcavar={i:pcvar}
pcavar.update(temppcavar)
pcabands[:,i]=pcabands[:,i]+pcnbands
# ''' NO PCA'''
# colorfea_vector=np.concatenate((fea_vector,colorfea_vector),axis=1)
# displayfea_vector=np.concatenate((fea_vector,displayfea_vector),axis=1)
# M=np.mean(colorfea_vector.T,axis=1)
# print('colorfea_vector M',M)
# pcabands=np.copy(colorfea_vector)
# featurechannel=10
'''Export to CSV'''
# np.savetxt('pcs.csv',pcabands,delimiter=',',fmt='%s')
# np.savetxt('color-index.csv',displayfea_vector,delimiter=',',fmt='%s')
#threedplot(pcabands)
# originpcabands.update({file:o_pcabands})
originpcabands.update({file:displayfea_vector})
pcabandsdisplay=pcabands.reshape(displayfea_l,displayfea_w,featurechannel)
#originbands={'LabOstu':pcabandsdisplay}
tempdictdisplay={'LabOstu':pcabandsdisplay}
#displaybandarray.update({file:displays})
displaybandarray.update({file:tempdictdisplay})
originbandarray.update({file:originbands})
need_w=int(450/4)
need_h=int(400/3)
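# Build one thumbnail button per PC channel: each PC image is pooled down
# (tkintercorestat.pool_forward with window = stride = ratio) so that 10
# buttons fit a roughly 450x400 panel (112x133 cells), then min-max scaled to
# 8-bit grayscale for display.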
for i in range(featurechannel):
band=np.copy(pcabandsdisplay[:,:,i])
ratio=max(displayfea_l/need_h,displayfea_w/need_w)
band,cache=tkintercorestat.pool_forward(band,{"f":int(ratio),"stride":int(ratio)})
bandrange=band.max()-band.min()
band=(band-band.min())/bandrange*255
buttonimg=Image.fromarray(band.astype('uint8'),'L')
pcbuttons.append(ImageTk.PhotoImage(buttonimg))
# buttonimg.save('pcbutton_'+str(i)+'.png',"PNG")
# print('saved')
from mpl_toolkits.mplot3d import Axes3D
def threedplot(area):
fig=pyplt.figure()
ax=fig.add_subplot(111,projection='3d')
n=100
xs=np.copy(area[0:n,0])
ys=np.copy(area[0:n,1])
zs=np.copy(area[0:n,3])
colors=("red","green","blue")
groups=("PC1","PC2","PC3")
#for c,l in [('r','o'),('g','^')]:
ax.scatter(xs,ys,np.max(zs),c='r',marker='o')
ax.scatter(xs,np.min(ys),zs,c='b',marker='^')
ax.scatter(np.max(xs),ys,zs,c='g')
ax.set_xlabel('PC1')
ax.set_ylabel('PC2')
ax.set_zlabel('PC3')
pyplt.show()
def changeimage(frame,filename):
global clusterdisplay,currentfilename,resviewframe
clusterdisplay={}
currentfilename=filename
print(filename)
generatedisplayimg(filename)
changedisplayimg(frame,'Origin')
for key in cluster:
tuplist=[]
for i in range(len(cluster)):
tuplist.append('')
tup=tuple(tuplist)
bandchoice[key].set(tup)
#for key in cluster:
# ch=ttk.Checkbutton(contentframe,text=key,variable=bandchoice[key],command=changecluster)#,command=partial(autosetclassnumber,clusternumberentry,bandchoice))
# ch.pack()
if filename in multi_results.keys():
for widget in resviewframe.winfo_children():
widget.pack_forget()
iternum=len(list(multi_results[filename][0].keys()))
itervar=IntVar()
itervar.set(iternum)
resscaler=Scale(resviewframe,from_=1,to=iternum,tickinterval=1,length=220,orient=HORIZONTAL,variable=itervar,command=partial(changeoutputimg,filename))
resscaler.pack()
outputbutton=Button(resviewframe,text='Export Results',command=partial(export_result,itervar))
outputbutton.pack()
def generatecheckbox(frame,classnum):
global checkboxdict,havecolorstrip
changekmeansbar('')
for widget in frame.winfo_children():
widget.pack_forget()
checkboxdict={}
havecolorstrip=False
addcolorstrip()
for i in range(10):
dictkey=str(i+1)
tempdict={dictkey:Variable()}
tempdict[dictkey].set('0')
checkboxdict.update(tempdict)
ch=Checkbutton(checkboxframe,text=dictkey,variable=checkboxdict[dictkey],command=partial(changeclusterbox,''))#,command=partial(changecluster,''))
if i+1>int(kmeans.get()):
ch.config(state=DISABLED)
ch.pack(side=LEFT)
#if i==0:
# ch.invoke()
#for i in range(int(classnum)):
# dictkey='class '+str(i+1)
# tempdict={dictkey:Variable()}
# checkboxdict.update(tempdict)
#ch=ttk.Checkbutton(frame,text=dictkey,command=partial(generateplant,checkboxdict,bandchoice,classnum),variable=checkboxdict[dictkey])
# ch=ttk.Checkbutton(frame,text=dictkey,command=changecluster,variable=checkboxdict[dictkey])
# ch.grid(row=int(i/3),column=int(i%3))
# if i==minipixelareaclass:
# ch.invoke()
def generateimgplant(event):
global currentlabels,changekmeans,colordicesband,originbinaryimg,pre_checkbox
colordicesband=np.copy(displaylabels)
keys=checkboxdict.keys()
plantchoice=[]
pre_checkbox=[]
for key in keys:
plantchoice.append(checkboxdict[key].get())
pre_checkbox.append(checkboxdict[key].get())
origindisplaylabels=np.copy(displaybandarray[currentfilename]['LabOstu'])
h,w,c=origindisplaylabels.shape
# tempdisplayimg=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0],
# displaybandarray[currentfilename]['LabOstu'].shape[1]))
# colordivimg=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0],
# displaybandarray[currentfilename]['LabOstu'].shape[1]))
tempdisplayimg=np.zeros((h,w))
colordivimg=np.zeros((h,w))
sel_count=plantchoice.count('1')
if sel_count == int(kmeans.get()):
tempdisplayimg=tempdisplayimg+1
else:
for i in range(int(kmeans.get())):
tup=plantchoice[i]
if '1' in tup:
tempdisplayimg=np.where(displaylabels==i,1,tempdisplayimg)
# uniquecolor=np.unique(tempdisplayimg)
# if len(uniquecolor)==1 and uniquecolor[0]==1:
# tempdisplayimg=np.copy(displaylabels).astype('float32')
currentlabels=np.copy(tempdisplayimg)
originbinaryimg=np.copy(tempdisplayimg)
tempcolorimg=np.copy(displaylabels).astype('float32')
# ratio=findratio([h,w],[850,850])
# if h*w<850*850:
# tempdisplayimg=cv2.resize(tempdisplayimg,(int(w*ratio),int(h*ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(w*ratio),int(h*ratio)))
# if h>850:
# ratio=round(h/850)
# tempdisplayimg=cv2.resize(tempdisplayimg,(int(w/ratio),int(h/ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(w/ratio),int(h/ratio)))
# if w>850:
# ratio=round(w/850)
# tempdisplayimg=cv2.resize(tempdisplayimg,(int(w/ratio),int(h/ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(w/ratio),int(h/ratio)))
# else:
# tempdisplayimg=cv2.resize(tempdisplayimg,(int(w/ratio),int(h/ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(w/ratio),int(h/ratio)))
# tempdisplayimg=cv2.resize(tempdisplayimg,(int(resizeshape[0]),int(resizeshape[1])))
# colordivimg=cv2.resize(tempcolorimg,(int(resizeshape[0]),int(resizeshape[1])))
colordivimg=np.copy(tempcolorimg)
binaryimg=np.zeros((h,w,3))
kvar=int(kmeans.get())
locs=np.where(tempdisplayimg==1)
binaryimg[locs]=[240,228,66]
colordeimg=np.zeros((h,w,3))
# binarypreview=cv2.resize(binaryimg,(int(previewshape[0]),int(previewshape[1])))
binarypreview=np.copy(binaryimg)
if kvar==1:
if colordivimg.min()<0:
# if abs(colordivimg.min())<colordivimg.max():
colordivimg=colordivimg-colordivimg.min()
colorrange=colordivimg.max()-colordivimg.min()
colordivimg=colordivimg*255/colorrange
grayimg=Image.fromarray(colordivimg.astype('uint8'),'L')
grayimg=grayimg.resize((int(resizeshape[0]),int(resizeshape[1])))
#grayimg.show()
colordivdict={}
colordivdict.update({'Size':[resizeshape[1],resizeshape[0]]})
colordivdict.update({'Image':ImageTk.PhotoImage(grayimg)})
displayimg['Color Deviation']=colordivdict
colordivpreview={}
# colordivpreimg=cv2.resize(colordivimg,(int(previewshape[0]),int(previewshape[1])))
graypreviewimg=Image.fromarray(colordivimg.astype('uint8'),'L')
graypreviewimg=graypreviewimg.resize((int(previewshape[0]),int(previewshape[1])))
colordivpreview.update({'Size':[previewshape[1],previewshape[0]]})
colordivpreview.update({'Image':ImageTk.PhotoImage(graypreviewimg)})
previewimg['Color Deviation']=colordivpreview
binaryimg=np.zeros((resizeshape[1],resizeshape[0],3))
tempdict={}
tempdict.update({'Size':[resizeshape[1],resizeshape[0]]})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(binaryimg.astype('uint8')))})
displayimg['ColorIndices']=tempdict
binarypreview=np.zeros((int(previewshape[1]),int(previewshape[0])))
tempdict={}
tempdict.update({'Size':binarypreview.shape})
tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(binarypreview.astype('uint8')))})
previewimg['ColorIndices']=tempdict
# changedisplayimg(imageframe,'Color Deviation')
else:
for i in range(kvar):
locs=np.where(colordivimg==i)
colordeimg[locs]=colorbandtable[i]
#pyplt.imsave('displayimg.png',tempdisplayimg)
#pyplt.imsave('allcolorindex.png',colordivimg)
#bands=Image.fromarray(tempdisplayimg)
#bands=bands.convert('L')
#bands.save('displayimg.png')
#indimg=cv2.imread('displayimg.png')
colordeimg=Image.fromarray(colordeimg.astype('uint8'))
colordeimg.save('allcolorindex.png',"PNG")
binaryimg=Image.fromarray(binaryimg.astype('uint8'))
binaryimg.save('binaryimg.png',"PNG")
binaryimg=binaryimg.resize((int(resizeshape[0]),int(resizeshape[1])))
tempdict={}
tempdict.update({'Size':[resizeshape[1],resizeshape[0]]})
tempdict.update({'Image':ImageTk.PhotoImage(binaryimg)})
displayimg['ColorIndices']=tempdict
tempdict={}
binaryimg=binaryimg.resize((int(previewshape[0]),int(previewshape[1])))
tempdict.update({'Size':[previewshape[1],previewshape[0]]})
tempdict.update({'Image':ImageTk.PhotoImage(binaryimg)})
previewimg['ColorIndices']=tempdict
#indimg=cv2.imread('allcolorindex.png')
#tempdict.update({'Image':ImageTk.PhotoImage(Image.fromarray(indimg))})
#
# colorimg=cv2.imread('allcolorindex.png')
# Image.fromarray((binaryimg.astype('uint8'))).save('binaryimg.png',"PNG")
colordeimg=colordeimg.resize((resizeshape[0],resizeshape[1]))
colordivdict={}
colordivdict.update({'Size':[resizeshape[1],resizeshape[0]]})
colordivdict.update({'Image':ImageTk.PhotoImage(colordeimg)})
displayimg['Color Deviation']=colordivdict
colordivdict={}
# colordeimgpre=cv2.resize(colordeimg,(int(previewshape[0]),int(previewshape[1])))
colordeimg=colordeimg.resize((previewshape[0],previewshape[1]))
colordivdict.update({'Size':[previewshape[1],previewshape[0]]})
colordivdict.update({'Image':ImageTk.PhotoImage(colordeimg)})
previewimg['Color Deviation']=colordivdict
# changedisplayimg(imageframe,'ColorIndices')
# print('sel count',sel_count)
if kvar>1:
if sel_count==0:
changedisplayimg(imageframe,'Color Deviation')
else:
changedisplayimg(imageframe,'ColorIndices')
# changekmeans=True
#def kmeansclassify(choicelist,reshapedtif):
def kmeansclassify_oldversion():
global clusterdisplay
#,minipixelareaclass
if int(kmeans.get())==0:
return
#for i in range(len(choicelist)):
# tempband=displaybandarray[currentfilename][choicelist[i]]
#tempband=cv2.resize(tempband,(450,450),interpolation=cv2.INTER_LINEAR)
# reshapedtif[:,i]=tempband.reshape(tempband.shape[0]*tempband.shape[1],2)[:,0]
#if len(choicelist)==0:
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcacount={}
keys=list(pcaboxdict.keys())
for item in keys:
if pcaboxdict[item].get()=='1':
pcacount.update({item:pcaboxdict[item]})
pcakeys=list(pcacount.keys())
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
if int(kmeans.get())==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave('k=1.png',displaylabels)
else:
#tempband=displaybandarray[currentfilename]['LabOstu']
if int(kmeans.get())>1:
h,w,c=tempband.shape
print('shape',tempband.shape)
reshapedtif=tempband.reshape(tempband.shape[0]*tempband.shape[1],c)
print('reshape',reshapedtif.shape)
clf=KMeans(n_clusters=int(kmeans.get()),init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(reshapedtif)
# print('label=0',np.any(tempdisplayimg==0))
displaylabels=tempdisplayimg.labels_.reshape((displaybandarray[currentfilename]['LabOstu'].shape[0],
displaybandarray[currentfilename]['LabOstu'].shape[1]))
clusterdict={}
displaylabels=displaylabels+10
for i in range(int(kmeans.get())):
locs=np.where(tempdisplayimg.labels_==i)
maxval=reshapedtif[locs].max()
print(maxval)
clusterdict.update({maxval:i+10})
print(clusterdict)
sortcluster=list(sorted(clusterdict))
print(sortcluster)
for i in range(len(sortcluster)):
cluster_num=clusterdict[sortcluster[i]]
displaylabels=np.where(displaylabels==cluster_num,i,displaylabels)
# pixelarea=1.0
# for i in range(int(kmeans.get())):
# pixelloc=np.where(displaylabels==i)
# pixelnum=len(pixelloc[0])
# temparea=float(pixelnum/(displaylabels.shape[0]*displaylabels.shape[1]))
# if temparea<pixelarea:
# #minipixelareaclass=i
# pixelarea=temparea
if kmeans.get() not in clusterdisplay:
tempdict={kmeans.get():displaylabels}
#clusterdisplay.update({''.join(choicelist):tempdict})
clusterdisplay.update(tempdict)
return displaylabels
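# kmeansclassify: cluster the currently selected PC band with k-means.
# The working band is a weighted blend controlled by the pc_combine_up slider:
# with w = slider - 0.5, the band is |w|*2 * rgbPC + (1 - |w|*2) * colorPC,
# where rgbPC is channel 9 (w < 0) or channel 10 (w > 0), both min-max scaled
# to 0-255, and colorPC is the selected PC. After fitting, clusters are
# relabeled in ascending order of their maximum feature value, so cluster IDs
# are intensity-ordered rather than arbitrary (the +10/+11 offsets avoid
# collisions while remapping in place).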
def kmeansclassify():
global clusterdisplay,displaylabels
if int(kmeans.get())==0:
return
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcpara=pc_combine_up.get()
print(pcpara,type(pcpara))
tempband=np.zeros((pcah,pcaw,1))
# pcsel=buttonvar.get()+2
pcsel=buttonvar.get()
pcweights=pc_combine_up.get()-0.5
if pcweights==0.0:
tempband[:,:,0]=tempband[:,:,0]+originpcabands[:,:,pcsel]
else:
if pcweights<0.0: #RGBPC1
rgbpc=originpcabands[:,:,9]
else:
rgbpc=originpcabands[:,:,10]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweights)*2*rgbpc
colorpc=originpcabands[:,:,pcsel]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweights)*2)*colorpc
tempband[:,:,0]=tempband[:,:,0]+firstterm+secondterm
if int(kmeans.get())==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave('k=1.png',displaylabels)
else:
if int(kmeans.get())>1:
h,w,c=tempband.shape
print('shape',tempband.shape)
reshapedtif=tempband.reshape(tempband.shape[0]*tempband.shape[1],c)
if partialpca==True:
partialshape=reshapedtif[nonzero_vector]
print('partial reshape',partialshape.shape)
clf=KMeans(n_clusters=int(kmeans.get()),init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(partialshape)
reshapedtif[nonzero_vector,0]=np.add(tempdisplayimg.labels_,1)
print(reshapedtif[nonzero_vector])
displaylabels=reshapedtif.reshape((displaybandarray[currentfilename]['LabOstu'].shape[0],
displaybandarray[currentfilename]['LabOstu'].shape[1]))
# reshapedtif=cv2.resize(reshapedtif,(c,resizeshape[0]*resizeshape[1]),cv2.INTER_LINEAR)
clusterdict={}
displaylabels=displaylabels+10
for i in range(int(kmeans.get())):
locs=np.where(tempdisplayimg.labels_==i)
try:
maxval=partialshape[locs].max()
except ValueError:
print('kmeans: cluster',i,'is empty')
messagebox.showerror('Error','Cluster '+str(i)+' is empty; cannot compute its maximum value.')
return displaylabels
print(maxval)
clusterdict.update({maxval:i+11})
print(clusterdict)
sortcluster=list(sorted(clusterdict))
print(sortcluster)
for i in range(len(sortcluster)):
cluster_num=clusterdict[sortcluster[i]]
displaylabels=np.where(displaylabels==cluster_num,i,displaylabels)
return displaylabels
else:
print('reshape',reshapedtif.shape)
clf=KMeans(n_clusters=int(kmeans.get()),init='k-means++',n_init=10,random_state=0)
tempdisplayimg=clf.fit(reshapedtif)
# print('label=0',np.any(tempdisplayimg==0))
displaylabels=tempdisplayimg.labels_.reshape((displaybandarray[currentfilename]['LabOstu'].shape[0],
displaybandarray[currentfilename]['LabOstu'].shape[1]))
# displaylabels=tempdisplayimg.labels_.reshape((resizeshape[1],resizeshape[0]))
clusterdict={}
displaylabels=displaylabels+10
for i in range(int(kmeans.get())):
locs=np.where(tempdisplayimg.labels_==i)
maxval=reshapedtif[locs].max()
print(maxval)
clusterdict.update({maxval:i+10})
print(clusterdict)
sortcluster=list(sorted(clusterdict))
print(sortcluster)
for i in range(len(sortcluster)):
cluster_num=clusterdict[sortcluster[i]]
displaylabels=np.where(displaylabels==cluster_num,i,displaylabels)
# if kmeans.get() not in clusterdisplay:
# tempdict={kmeans.get():displaylabels}
# #clusterdisplay.update({''.join(choicelist):tempdict})
# clusterdisplay.update(tempdict)
return displaylabels
def addcolorstrip():
global kmeanscanvasframe,havecolorstrip
if havecolorstrip is False:
colornum=int(kmeans.get())
for widget in kmeanscanvasframe.winfo_children():
widget.pack_forget()
widget.delete(ALL)
widget.config(width=350,height=10)
widget.create_image(3,0,image=colorstripdict['colorstrip'+str(colornum)],anchor=NW)
widget.pack()
havecolorstrip=True
def getPCs():
global displayimg,displaypclabels
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcweights=pc_combine_up.get()-0.5
tempband=np.zeros((pcah,pcaw))
# pcsel=buttonvar.get()+2
pcsel=buttonvar.get()
if pcweights==0.0:
tempband=tempband+originpcabands[:,:,pcsel]
else:
if pcweights<0.0: #RGBPC1
rgbpc=originpcabands[:,:,9]
else:
rgbpc=originpcabands[:,:,10]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweights)*2*rgbpc
colorpc=originpcabands[:,:,pcsel]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweights)*2)*colorpc
tempband=tempband+firstterm+secondterm
displaypclabels=np.copy(tempband)
displaylabels=np.copy(tempband)
pyplt.imsave('k=1.png',displaylabels)
colordivimg=np.copy(displaylabels)
print('origin pc range',colordivimg.max(),colordivimg.min())
# colordivimg=cv2.resize(tempcolorimg,(int(resizeshape[0]),int(resizeshape[1])))
print('pc range',colordivimg.max(),colordivimg.min())
if colordivimg.min()<0:
colordivimg=colordivimg-colordivimg.min()
colorrange=colordivimg.max()-colordivimg.min()
colordivimg=(colordivimg)*255/colorrange
colordivimg=Image.fromarray(colordivimg.astype('uint8'),'L')
colordivimg=colordivimg.resize((int(resizeshape[0]),int(resizeshape[1])),Image.ANTIALIAS) # Image.ANTIALIAS was removed in Pillow 10; use Image.LANCZOS on newer Pillow
displayimg['PCs']['Image']=ImageTk.PhotoImage(colordivimg)
# displayimg['Color Deviation']['Image']=ImageTk.PhotoImage(colordivimg)
def getPCs_olcversion():
global displayimg
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcacount={}
keys=list(pcaboxdict.keys())
for item in keys:
if pcaboxdict[item].get()=='1':
pcacount.update({item:pcaboxdict[item]})
pcakeys=list(pcacount.keys())
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
# if int(kmeans.get())==1:
print('kmeans=1')
displaylabels=np.mean(tempband,axis=2)
pyplt.imsave('k=1.png',displaylabels)
ratio=findratio([originpcabands.shape[0],originpcabands.shape[1]],[screenstd,screenstd])
tempcolorimg=np.copy(displaylabels)
colordivimg=np.zeros((displaylabels.shape[0],
displaylabels.shape[1]))
# if originpcabands.shape[0]*originpcabands.shape[1]<850*850:
# # tempdisplayimg=cv2.resize(originpcabands,(int(originpcabands.shape[1]*ratio),int(originpcabands.shape[0]*ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]*ratio),int(colordivimg.shape[0]*ratio)))
# else:
# # tempdisplayimg=cv2.resize(originpcabands,(int(originpcabands.shape[1]/ratio),int(originpcabands.shape[0]/ratio)))
# colordivimg=cv2.resize(tempcolorimg,(int(colordivimg.shape[1]/ratio),int(colordivimg.shape[0]/ratio)))
# if colordivimg.min()<0:
# if abs(colordivimg.min())<colordivimg.max():
# colordivimg=colordivimg-colordivimg.min()
colordivimg=cv2.resize(tempcolorimg,(int(resizeshape[0]),int(resizeshape[1])))
if colordivimg.min()<0:
colordivimg=colordivimg-colordivimg.min()
colorrange=colordivimg.max()-colordivimg.min()
colordivimg=colordivimg*255/colorrange
colordivimg=colordivimg.astype('uint8')
grayimg=Image.fromarray(colordivimg,'L')
displayimg['PCs']['Image']=ImageTk.PhotoImage(grayimg)
def changepca(event):
global clusterdisplay,colordicesband,oldpcachoice
global displaylabels
if len(oldpcachoice)>0:
keys=pcaboxdict.keys()
newlist=[]
for key in keys:
newlist.append(pcaboxdict[key].get())
samecount=0
print('oldlist',oldpcachoice)
print('newlist',newlist)
for i in range(len(oldpcachoice)):
if oldpcachoice[i]==newlist[i]:
samecount+=1
if samecount==len(oldpcachoice):
return
getPCs()
clusterdisplay={}
keys=pcaboxdict.keys()
oldpcachoice=[]
for key in keys:
oldpcachoice.append(pcaboxdict[key].get())
displaylabels=kmeansclassify()
colordicesband=np.copy(displaylabels)
generateimgplant('') # generateimgplant requires an event argument; pass a dummy as elsewhere
return
def savePCAimg(path,originfile,file):
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
# pcacount={}
# keys=list(pcaboxdict.keys())
# for item in keys:
# if pcaboxdict[item].get()=='1':
# pcacount.update({item:pcaboxdict[item]})
# pcakeys=list(pcacount.keys())
# tempband=np.zeros((pcah,pcaw,len(pcakeys)))
# for i in range(len(pcakeys)):
# channel=int(pcakeys[i])-1
# tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
# displaylabels=np.mean(tempband,axis=2)
# generateimgplant(displaylabels)
# grayimg=(((displaylabels-displaylabels.min())/(displaylabels.max()-displaylabels.min()))*255.9).astype(np.uint8)
# pyplt.imsave('k=1.png',displaylabels.astype('uint8'))
# pyplt.imsave('k=1.png',grayimg)
pcweights=pc_combine_up.get()-0.5
tempband=np.zeros((pcah,pcaw))
# pcsel=buttonvar.get()+2
pcsel=buttonvar.get()
if pcweights==0.0:
tempband=tempband+originpcabands[:,:,pcsel]
else:
if pcweights<0.0: #RGBPC1
rgbpc=originpcabands[:,:,9]
else:
rgbpc=originpcabands[:,:,10]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweights)*2*rgbpc
colorpc=originpcabands[:,:,pcsel]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweights)*2)*colorpc
tempband=tempband+firstterm+secondterm
displaylabels=np.copy(tempband)
if displaylabels.min()<0:
# if abs(displaylabels.min())<displaylabels.max():
displaylabels=displaylabels-displaylabels.min()
colorrange=displaylabels.max()-displaylabels.min()
displaylabels=displaylabels*255/colorrange
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
originheight,originwidth=Multigraybands[file].size
origingray=grayimg.resize([originwidth,originheight],resample=Image.BILINEAR)
origingray.save(path+'/'+originfile+'-PCAimg.png',"PNG")
# addcolorstrip()
return
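# changecluster: recompute the blended PC band and re-run clustering.
# k == 1 renders the band itself as a min-max scaled grayscale image; k > 1
# re-runs k-means, clears all cluster checkboxes, regenerates the plant mask,
# and refreshes the color strip.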
def changecluster(event):
global havecolorstrip,pre_checkbox,displaylabels,needreclass
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcweights=pc_combine_up.get()-0.5
tempband=np.zeros((pcah,pcaw,1))
# pcsel=buttonvar.get()+2
pcsel=buttonvar.get()
if pcweights==0.0:
tempband[:,:,0]=tempband[:,:,0]+originpcabands[:,:,pcsel]
else:
if pcweights<0.0: #RGBPC1
rgbpc=originpcabands[:,:,9]
else:
rgbpc=originpcabands[:,:,10]
rgbpc=(rgbpc-rgbpc.min())*255/(rgbpc.max()-rgbpc.min())
firstterm=abs(pcweights)*2*rgbpc
colorpc=originpcabands[:,:,pcsel]
colorpc=(colorpc-colorpc.min())*255/(colorpc.max()-colorpc.min())
secondterm=(1-abs(pcweights)*2)*colorpc
tempband[:,:,0]=tempband[:,:,0]+firstterm+secondterm
if int(kmeans.get())==1:
displaylabels=np.mean(tempband,axis=2)
generateimgplant(displaylabels)
print('max',displaylabels.max())
print('min',displaylabels.min())
if displaylabels.min()<0:
# if abs(displaylabels.min())<displaylabels.max():
displaylabels=displaylabels-displaylabels.min()
colorrange=displaylabels.max()-displaylabels.min()
displaylabels=displaylabels*255/colorrange
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
print('max',displaylabels.max())
print('min',displaylabels.min())
# grayimg.thumbnail((int(resizeshape[0]),int(resizeshape[1])),Image.ANTIALIAS)
grayimg.save('k=1.png',"PNG")
addcolorstrip()
return
else:
# if kmeans.get() in clusterdisplay:
# displaylabels=clusterdisplay[kmeans.get()]
#
# else:
# havecolorstrip=False
# # choicelist=[]
# #reshapemodified_tif=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0]*displaybandarray[currentfilename]['LabOstu'].shape[1],len(choicelist)))
# #displaylabels=kmeansclassify(choicelist,reshapemodified_tif)
# displaylabels=kmeansclassify()
displaylabels=kmeansclassify()
# changedisplayimg(imageframe,'Color Deviation')
global checkboxdict
keys=checkboxdict.keys()
for key in keys:
checkboxdict[key].set('0')
generateimgplant('')
# pyplt.imsave('allcolorindex.png',displaylabels)
#kmeanscanvas.update()
addcolorstrip()
return
def changecluster_oldversion(event):
global havecolorstrip,pre_checkbox
imageband=np.copy(displaybandarray[currentfilename]['LabOstu'])
if int(kmeans.get())==1:
originpcabands=displaybandarray[currentfilename]['LabOstu']
pcah,pcaw,pcac=originpcabands.shape
pcacount={}
keys=list(pcaboxdict.keys())
for item in keys:
if pcaboxdict[item].get()=='1':
pcacount.update({item:pcaboxdict[item]})
pcakeys=list(pcacount.keys())
tempband=np.zeros((pcah,pcaw,len(pcakeys)))
for i in range(len(pcakeys)):
channel=int(pcakeys[i])-1
tempband[:,:,i]=tempband[:,:,i]+originpcabands[:,:,channel]
displaylabels=np.mean(tempband,axis=2)
generateimgplant(displaylabels)
# grayimg=(((displaylabels-displaylabels.min())/(displaylabels.max()-displaylabels.min()))*255.9).astype(np.uint8)
# pyplt.imsave('k=1.png',displaylabels.astype('uint8'))
# pyplt.imsave('k=1.png',grayimg)
print('max',displaylabels.max())
print('min',displaylabels.min())
if displaylabels.min()<0:
# if abs(displaylabels.min())<displaylabels.max():
displaylabels=displaylabels-displaylabels.min()
colorrange=displaylabels.max()-displaylabels.min()
displaylabels=displaylabels*255/colorrange
grayimg=Image.fromarray(displaylabels.astype('uint8'),'L')
print('max',displaylabels.max())
print('min',displaylabels.min())
grayimg.save('k=1.png',"PNG")
# originheight,originwidth=Multigraybands[filenames[0]].size
# origingray=grayimg.resize([originwidth,originheight],resample=Image.BILINEAR)
# origingray.save('PCAimg.png',"PNG")
addcolorstrip()
return
else:
if kmeans.get() in clusterdisplay:
displaylabels=clusterdisplay[kmeans.get()]
if len(pre_checkbox)>0:
keys=checkboxdict.keys()
plantchoice=[]
for key in keys:
plantchoice.append(checkboxdict[key].get())
allsame=True
for i in range(len(pre_checkbox)):
if pre_checkbox[i]!=plantchoice[i]:
allsame=False
if allsame==True:
print('allsame=true')
return
else:
havecolorstrip=False
choicelist=[]
#reshapemodified_tif=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0]*displaybandarray[currentfilename]['LabOstu'].shape[1],len(choicelist)))
#displaylabels=kmeansclassify(choicelist,reshapemodified_tif)
displaylabels=kmeansclassify()
generateimgplant(displaylabels)
# pyplt.imsave('allcolorindex.png',displaylabels)
#kmeanscanvas.update()
addcolorstrip()
return
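# showcounting: draw per-label bounding boxes and ID text onto the source
# image. Text legibility uses a halo trick: each string is drawn four times in
# white at (+/-1, +/-1) pixel offsets, then once in black at the center.
# Optional white (whext) or black (blkext) backgrounds keep only labeled
# pixels from the photo.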
def showcounting(tup,number=True,frame=True,header=True,whext=False,blkext=False):
global multi_results,kernersizes#,pixelmmratio,kernersizes
global font
labels=tup[0]
counts=tup[1]
if len(mappath)>0:
colortable=tkintercorestat.get_mapcolortable(labels,elesize.copy(),labellist.copy())
else:
colortable=tup[2]
#colortable=labeldict[itervalue]['colortable']
if type(refarea)!=type(None):
colortable.update({65535:'Ref'})
labels[refarea]=65535
#labeldict=tup[0]
coinparts=tup[3]
filename=tup[4]
#currlabeldict=labeldict['iter'+str(int(itervar)-1)]
#print(currlabeldict)
#labels=currlabeldict['labels']
#counts=currlabeldict['counts']
#colortable=currlabeldict['colortable']
uniquelabels=list(colortable.keys())
originfile,extension=os.path.splitext(filename)
imgrsc=cv2.imread(filename,flags=cv2.IMREAD_ANYCOLOR)
imgrsc=cv2.cvtColor(imgrsc,cv2.COLOR_BGR2RGB)
imgrsc=cv2.resize(imgrsc,(labels.shape[1],labels.shape[0]),interpolation=cv2.INTER_LINEAR)
image=Image.fromarray(imgrsc)
if whext==True:
# blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
whbkg[:,:,:]=[255,255,255]
itemlocs=np.where(labels!=0)
# blkbkg[itemlocs]=imgrsc[itemlocs]
whbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(whbkg.astype('uint8'))
if blkext==True:
blkbkg=np.zeros((labels.shape[0],labels.shape[1],3),dtype='float')
itemlocs=np.where(labels!=0)
blkbkg[itemlocs]=imgrsc[itemlocs]
image=Image.fromarray(blkbkg.astype('uint8'))
#print('showcounting img',image.size)
#image.save('beforeresize.gif',append_images=[image])
#image=image.resize([labels.shape[1],labels.shape[0]],resample=Image.BILINEAR)
print('showcounting_resize',image.size)
image.save('beforlabel.gif',append_images=[image])
draw=ImageDraw.Draw(image)
#font=ImageFont.load_default()
sizeuniq,sizecounts=np.unique(labels,return_counts=True)
minsize=min(image.size[0],image.size[1])
suggsize=int(minsize**0.5)
# if suggsize>22:
# suggsize=22
# if suggsize<14:
# suggsize=14
#suggsize=8
#print('fontsize',suggsize)
# suggsize=22
font=ImageFont.truetype('cmb10.ttf',size=suggsize)
#if labels.shape[1]<850:
# font=ImageFont.truetype('cmb10.ttf',size=16)
#else:
# font=ImageFont.truetype('cmb10.ttf',size=22)
if len(coinparts)>0:
tempband=np.zeros(labels.shape)
coinkeys=coinparts.keys()
for coin in coinkeys:
coinlocs=coinparts[coin]
tempband[coinlocs]=1
global recborder
for uni in uniquelabels:
if uni!=0:
uni=colortable[uni]
if uni=='Ref':
pixelloc = np.where(labels == 65535)
else:
pixelloc = np.where(labels == uni)
try:
ulx = min(pixelloc[1])
except ValueError:
print('no pixelloc[1] on uni=',uni)
print('pixelloc =',pixelloc)
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
print(ulx, uly, rlx, rly)
if frame==True:
draw.polygon([(ulx,uly),(rlx,uly),(rlx,rly),(ulx,rly)],outline='red')
if number==True:
if uni in colortable:
canvastext = str(colortable[uni])
else:
# canvastext = 'No label'
canvastext=uni
canvastext=str(canvastext)
if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=font, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=font, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=font, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=font,fill='black')
if header==True:
if refarea is not None:
content='item count:'+str(len(uniquelabels)-1)+'\n File: '+filename
else:
content='item count:'+str(len(uniquelabels))+'\n File: '+filename
contentlength=len(content)+50
#rectext=canvas.create_text(10,10,fill='black',font='Times 16',text=content,anchor=NW)
draw.text((10-1, 10+1), text=content, font=font, fill='white')
draw.text((10+1, 10+1), text=content, font=font, fill='white')
draw.text((10-1, 10-1), text=content, font=font, fill='white')
draw.text((10+1, 10-1), text=content, font=font, fill='white')
#draw.text((10,10),text=content,font=font,fill=(141,2,31,0))
draw.text((10,10),text=content,font=font,fill='black')
#image.save(originfile+'-countresult'+extension,"JPEG")
#firstimg=Multigraybands[currentfilename]
#height,width=firstimg.size
height,width,channel=displaybandarray[filename]['LabOstu'].shape
ratio=findratio([height,width],[screenstd,screenstd])
#if labels.shape[0]*labels.shape[1]<850*850:
# disimage=image.resize([int(labels.shape[1]*ratio),int(labels.shape[0]*ratio)],resample=Image.BILINEAR)
#else:
# disimage=image.resize([int(labels.shape[1]/ratio),int(labels.shape[0]/ratio)],resample=Image.BILINEAR)
print('show counting ratio',ratio)
if height*width<screenstd*screenstd:
print('showcounting small')
disimage=image.resize([int(width*ratio),int(height*ratio)],resample=Image.BILINEAR)
else:
print('showcounting big')
disimage=image.resize([int(width/ratio),int(height/ratio)],resample=Image.BILINEAR)
print('showcounting shape',disimage.size)
displayoutput=ImageTk.PhotoImage(disimage)
disimage.save('output.gif',append_images=[disimage])
#image.save('originoutput.gif',append_images=[image])
return displayoutput,image,disimage
#displayimg['Output']=displayoutput
#changedisplayimg(imageframe,'Output')
#time.sleep(5)
#image.show()
def changeoutputimg(file,intnum):
outputimg=outputimgdict[file]['iter'+str(int(intnum)-1)]
tempdict={}
tempdict.update({'Size':displayimg['ColorIndices']['Size']})
tempdict.update({'Image':outputimg})
displayimg['Output']=tempdict
changedisplayimg(imageframe,'Output')
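# export_ext: render and save size/segment/label result images for every
# processed file, measuring each labeled kernel as it goes (see the
# chord-based length/width computation below).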
def export_ext(iterver,path,whext=False,blkext=False):
suggsize=8
print('fontsize',suggsize)
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
files=multi_results.keys()
# path=filedialog.askdirectory()
for file in files:
labeldict=multi_results[file][0]
totalitervalue=len(list(labeldict.keys()))
#itervalue='iter'+str(int(iterver.get())-1)
#itervalue='iter'+str(totalitervalue-1)
#itervalue=int(iterver.get())
itervalue='iter'+iterver
print(itervalue)
print(labeldict)
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
if len(mappath)>0:
colortable=tkintercorestat.get_mapcolortable(labels,elesize.copy(),labellist.copy())
else:
colortable=labeldict[itervalue]['colortable']
#originheight,originwidth=Multigraybands[file].size
#copylabels=np.copy(labels)
#copylabels[refarea]=65535
#labels=cv2.resize(copylabels.astype('float32'),dsize=(originwidth,originheight),interpolation=cv2.INTER_LINEAR)
head_tail=os.path.split(file)
originfile,extension=os.path.splitext(head_tail[1])
if len(path)>0:
tup=(labels,counts,colortable,[],currentfilename)
_band,segimg,small_segimg=showcounting(tup,False,True,True,whext,blkext)
#imageband=outputimgbands[file][itervalue]
imageband=segimg
draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
tempdict={}
if refarea is not None:
specarea=float(sizeentry.get())
pixelmmratio=(specarea/len(refarea[0]))**0.5
else:
pixelmmratio=1.0
#print('coinsize',coinsize.get(),'pixelmmratio',pixelmmratio)
print('pixelmmratio',pixelmmratio)
for uni in uniquelabels:
if uni !=0:
tempuni=colortable[uni]
if tempuni=='Ref':
pixelloc=np.where(labels==65535)
else:
pixelloc = np.where(labels == float(uni))
try:
ulx = min(pixelloc[1])
except ValueError:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
print(ulx, uly, rlx, rly)
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
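# Kernel size from border pixels: length is the longest chord between any two
# boundary points; width is the longest chord whose direction is nearly
# orthogonal to the length axis (|cos(theta)| <= 0.13, i.e. within about 7.5
# degrees of perpendicular). Both chords are rasterized with Bresenham's line
# algorithm for drawing.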
length={}
currborder=tkintercore.get_boundaryloc(labels,uni)
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
templength=float(((currborder[0][i]-currborder[0][j])**2+(currborder[1][i]-currborder[1][j])**2)**0.5)
length.update({(i,j):templength})
sortedlength=sorted(length,key=length.get,reverse=True)
try:
topcouple=sortedlength[0]
except IndexError:
continue
kernellength=length[topcouple]
i=topcouple[0]
j=topcouple[1]
x0=currborder[1][i]
y0=currborder[0][i]
x1=currborder[1][j]
y1=currborder[0][j]
#slope=float((y0-y1)/(x0-x1))
#linepoints=[(currborder[1][i],currborder[0][i]),(currborder[1][j],currborder[0][j])]
#draw.line(linepoints,fill='yellow')
#points=linepixels(currborder[1][i],currborder[0][i],currborder[1][j],currborder[0][j])
lengthpoints=cal_kernelsize.bresenhamline(x0,y0,x1,y1) #x0,y0,x1,y1
for point in lengthpoints:
if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='yellow')
# abovecenter=[]
# lowercenter=[]
# for i in range(len(currborder[0])):
# for j in range(len(lengthpoints)):
# if currborder[0][i]<lengthpoints[j][1]:
# lowercenter.append((currborder[1][i],currborder[0][i])) #append(x,y)
# break
# loc=(currborder[1][i],currborder[0][i])
# if loc not in abovecenter and loc not in lowercenter:
# abovecenter.append(loc)
othodict={}
# widthdict={}
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
wx0=currborder[1][i]
wy0=currborder[0][i]
wx1=currborder[1][j]
wy1=currborder[0][j]
u1=x1-x0
u2=y1-y0
v1=wx1-wx0
v2=wy1-wy0
otho=abs(u1*v1+u2*v2)/(((u1**2+u2**2)**0.5)*(v1**2+v2**2)**0.5)
wlength=float((wx0-wx1)**2+(wy0-wy1)**2)**0.5
if otho<=0.13:
othodict.update({(wx0,wy0,wx1,wy1):wlength})
sortedwidth=sorted(othodict,key=othodict.get,reverse=True)
try:
topwidth=sortedwidth[0]
except IndexError:
continue
widepoints=cal_kernelsize.bresenhamline(topwidth[0],topwidth[1],topwidth[2],topwidth[3])
for point in widepoints:
if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='black')
width=othodict[topwidth]
print('width',width,'length',kernellength)
print('kernelwidth='+str(width*pixelmmratio))
print('kernellength='+str(kernellength*pixelmmratio))
#print('kernelwidth='+str(kernelwidth*pixelmmratio))
tempdict.update({colortable[uni]:[kernellength,width,pixelmmratio**2*len(pixelloc[0]),kernellength*pixelmmratio,width*pixelmmratio]})
#if uni in colortable:
canvastext = str(colortable[uni])
#else:
# canvastext = uni
if imgtypevar.get()=='0':
draw.text((midx-1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=smallfont, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=smallfont,fill='black')
#print(event.x, event.y, labels[event.x, event.y], ulx, uly, rlx, rly)
#recborder = canvas.create_rectangle(ulx, uly, rlx, rly, outline='red')
#drawcontents.append(recborder)
kernersizes.update({file:tempdict})
originheight,originwidth=Multigraybands[file].size
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
extcolor=""
if whext==True:
extcolor= "-extwht"
if blkext==True:
extcolor="-extblk"
image.save(path+'/'+originfile+extcolor+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],currentfilename)
_band,segimg,small_segimg=showcounting(tup,False,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=showcounting(tup,True,True,True,whext,blkext)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+extcolor+'-labelresult'+'.png',"PNG")
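# export_result: top-level export. Prompts for an output directory, then for
# each file writes (1) a per-label pixel-location CSV, (2) size/segment/label
# overlay images, (3) the PCA, color-index, and binary-mask images, and (4) an
# outputdata.csv with per-label morphology plus avg/sum/std of every color
# index and of the blended PC band.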
def export_result(iterver):
global batch
if proc_mode[proc_name].get()=='1':
batchprocess.batch_exportpath()
return
suggsize=8
print('fontsize',suggsize)
smallfont=ImageFont.truetype('cmb10.ttf',size=suggsize)
files=multi_results.keys()
path=filedialog.askdirectory()
root.update()
# export_ext(iterver,path,True,False)
# export_ext(iterver,path,False,True)
for file in files:
labeldict=multi_results[file][0]
totalitervalue=len(list(labeldict.keys()))
#itervalue='iter'+str(int(iterver.get())-1)
#itervalue='iter'+str(totalitervalue-1)
#itervalue=int(iterver.get())
itervalue='iter'+iterver
print(itervalue)
print(labeldict)
labels=labeldict[itervalue]['labels']
counts=labeldict[itervalue]['counts']
if len(mappath)>0:
colortable=tkintercorestat.get_mapcolortable(labels,elesize.copy(),labellist.copy())
else:
colortable=labeldict[itervalue]['colortable']
#originheight,originwidth=Multigraybands[file].size
#copylabels=np.copy(labels)
#copylabels[refarea]=65535
#labels=cv2.resize(copylabels.astype('float32'),dsize=(originwidth,originheight),interpolation=cv2.INTER_LINEAR)
head_tail=os.path.split(file)
originfile,extension=os.path.splitext(head_tail[1])
originimg_crop=cv2.imread(file)
uniquelabels=list(colortable.keys())
originheight,originwidth=Multigraybands[file].size
ratio=int(findratio([512,512],[labels.shape[0],labels.shape[1]]))
if labels.shape[0]<512:
cache=(np.zeros((labels.shape[0]*ratio,labels.shape[1]*ratio)),{"f":int(ratio),"stride":int(ratio)})
convband=tkintercorestat.pool_backward(labels,cache)
else:
if labels.shape[0]>512:
convband=cv2.resize(labels,(512,512),interpolation=cv2.INTER_LINEAR)
else:
if labels.shape[0]==512:
convband=np.copy(labels)
locfilename=path+'/'+originfile+'-pixellocs.csv'
#from spectral import imshow, view_cube
'''hyperspectral img process'''
# import spectral.io.envi as envi
lesszeroonefive=[]
with open(locfilename,mode='w') as f:
csvwriter=csv.writer(f)
rowcontent=['id','locs']
csvwriter.writerow(rowcontent)
# result_ref=envi.open(head_tail[0]+'/'+originfile+'/results/REFLECTANCE_'+originfile+'.hdr', head_tail[0]+'/'+originfile+'/results/REFLECTANCE_'+originfile+'.dat')
# result_nparr=np.array(result_ref.load())
# corrected_nparr=np.copy(result_nparr)
for uni in uniquelabels:
if uni!=0:
tempuni=colortable[uni]
if tempuni=='Ref':
pixelloc = np.where(convband == 65535)
else:
pixelloc = np.where(convband == float(uni))
# kernelval=corrected_nparr[pixelloc]
# nirs=np.mean(kernelval,axis=0)
# print('nirs 170',nirs[170])
# if nirs[170]<0.15:
# lesszeroonefive.append(uni)
rowcontent=[colortable[uni]]
rowcontent=rowcontent+list(pixelloc[0])
csvwriter.writerow(rowcontent)
rowcontent=[colortable[uni]]
rowcontent=rowcontent+list(pixelloc[1])
csvwriter.writerow(rowcontent)
f.close()
# print(lesszeroonefive)
'''end'''
if len(path)>0:
tup=(labels,counts,colortable,[],currentfilename)
_band,segimg,small_segimg=showcounting(tup,False)
#imageband=outputimgbands[file][itervalue]
imageband=segimg
draw=ImageDraw.Draw(imageband)
uniquelabels=list(colortable.keys())
tempdict={}
if refarea is not None:
specarea=float(sizeentry.get())
pixelmmratio=(specarea/len(refarea[0]))**0.5
else:
pixelmmratio=1.0
#print('coinsize',coinsize.get(),'pixelmmratio',pixelmmratio)
print('pixelmmratio',pixelmmratio)
for uni in uniquelabels:
if uni !=0:
#uni=colortable[uni]
tempuni=colortable[uni]
if tempuni=='Ref':
pixelloc = np.where(labels == 65535)
else:
pixelloc = np.where(labels == float(uni))
try:
ulx = min(pixelloc[1])
except ValueError:
continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
print(ulx, uly, rlx, rly)
midx = ulx + int((rlx - ulx) / 2)
midy = uly + int((rly - uly) / 2)
length={}
currborder=tkintercore.get_boundaryloc(labels,uni)
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
templength=float(((currborder[0][i]-currborder[0][j])**2+(currborder[1][i]-currborder[1][j])**2)**0.5)
length.update({(i,j):templength})
sortedlength=sorted(length,key=length.get,reverse=True)
try:
topcouple=sortedlength[0]
except IndexError:
continue
kernellength=length[topcouple]
i=topcouple[0]
j=topcouple[1]
x0=currborder[1][i]
y0=currborder[0][i]
x1=currborder[1][j]
y1=currborder[0][j]
#slope=float((y0-y1)/(x0-x1))
linepoints=[(currborder[1][i],currborder[0][i]),(currborder[1][j],currborder[0][j])]
#draw.line(linepoints,fill='yellow')
#points=linepixels(currborder[1][i],currborder[0][i],currborder[1][j],currborder[0][j])
lengthpoints=cal_kernelsize.bresenhamline(x0,y0,x1,y1) #x0,y0,x1,y1
for point in lengthpoints:
if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='yellow')
othodict={}
# widthdict={}
for i in range(len(currborder[0])):
for j in range(i+1,len(currborder[0])):
wx0=currborder[1][i]
wy0=currborder[0][i]
wx1=currborder[1][j]
wy1=currborder[0][j]
u1=x1-x0
u2=y1-y0
v1=wx1-wx0
v2=wy1-wy0
otho=abs(u1*v1+u2*v2)/(((u1**2+u2**2)**0.5)*(v1**2+v2**2)**0.5)
wlength=float((wx0-wx1)**2+(wy0-wy1)**2)**0.5
if otho<=0.13:
othodict.update({(wx0,wy0,wx1,wy1):wlength})
sortedwidth=sorted(othodict,key=othodict.get,reverse=True)
try:
topwidth=sortedwidth[0]
except IndexError:
continue
widepoints=cal_kernelsize.bresenhamline(topwidth[0],topwidth[1],topwidth[2],topwidth[3])
for point in widepoints:
if imgtypevar.get()=='0':
draw.point([int(point[0]),int(point[1])],fill='black')
width=othodict[topwidth]
print('width',width,'length',kernellength)
print('kernelwidth='+str(width*pixelmmratio))
print('kernellength='+str(kernellength*pixelmmratio))
#print('kernelwidth='+str(kernelwidth*pixelmmratio))
tempdict.update({colortable[uni]:[kernellength,width,pixelmmratio**2*len(pixelloc[0]),kernellength*pixelmmratio,width*pixelmmratio]})
#if uni in colortable:
canvastext = str(colortable[uni])
# else:
# canvastext = 'No label'
# canvastext = uni
if imgtypevar.get()=='0':
if uni in lesszeroonefive:
draw.text((midx-1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=smallfont, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=smallfont,fill='red')
else:
draw.text((midx-1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy+1), text=canvastext, font=smallfont, fill='white')
draw.text((midx-1, midy-1), text=canvastext, font=smallfont, fill='white')
draw.text((midx+1, midy-1), text=canvastext, font=smallfont, fill='white')
#draw.text((midx,midy),text=canvastext,font=font,fill=(141,2,31,0))
draw.text((midx,midy),text=canvastext,font=smallfont,fill='black')
#print(event.x, event.y, labels[event.x, event.y], ulx, uly, rlx, rly)
#recborder = canvas.create_rectangle(ulx, uly, rlx, rly, outline='red')
#drawcontents.append(recborder)
kernersizes.update({file:tempdict})
image=imageband.resize([originwidth,originheight],resample=Image.BILINEAR)
image.save(path+'/'+originfile+'-sizeresult'+'.png',"PNG")
tup=(labels,counts,colortable,[],currentfilename)
_band,segimg,small_segimg=showcounting(tup,False)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+'-segmentresult'+'.png',"PNG")
_band,segimg,small_segimg=showcounting(tup,True)
segimage=segimg.resize([originwidth,originheight],resample=Image.BILINEAR)
segimage.save(path+'/'+originfile+'-labelresult'+'.png',"PNG")
originrestoredband=np.copy(labels)
restoredband=originrestoredband.astype('uint8')
colordiv=np.zeros((colordicesband.shape[0],colordicesband.shape[1],3))
savePCAimg(path,originfile,file)
# kvar=int(kmeans.get())
# print('kvar',kvar)
# for i in range(kvar):
# locs=np.where(colordicesband==i)
# colordiv[locs]=colorbandtable[i]
# colordivimg=Image.fromarray(colordiv.astype('uint8'))
# colordivimg.save(path+'/'+originfile+'-colordevice'+'.jpeg',"JPEG")
colordivimg=Image.open('allcolorindex.png')
copycolordiv=colordivimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copycolordiv.save(path+'/'+originfile+'-colordevice'+'.png',"PNG")
#pyplt.imsave(path+'/'+originfile+'-colordevice'+'.png',colordiv.astype('uint8'))
# copybinary=np.zeros((originbinaryimg.shape[0],originbinaryimg.shape[1],3),dtype='float')
# nonzeros=np.where(originbinaryimg==1)
# copybinary[nonzeros]=[255,255,0]
# binaryimg=Image.fromarray(copybinary.astype('uint8'))
binaryimg=Image.open('binaryimg.png')
copybinaryimg=binaryimg.resize([originwidth,originheight],resample=Image.BILINEAR)
copybinaryimg.save(path+'/'+originfile+'-binaryimg'+'.png',"PNG")
# pyplt.imsave(path+'/'+originfile+'-binaryimg'+'.png',originbinaryimg.astype('uint8'))
#restoredband=cv2.resize(src=restoredband,dsize=(originwidth,originheight),interpolation=cv2.INTER_LINEAR)
print(restoredband.shape)
currentsizes=kernersizes[file]
indicekeys=list(originbandarray[file].keys())
indeclist=[ 0 for i in range(len(indicekeys)*3)]
pcalist=[0 for i in range(3)]
# temppcabands=np.zeros((originpcabands[file].shape[0],len(batch['PCs'])))
# temppcabands=np.zeros(originpcabands[file].shape[0],1)
# for i in range(len(batch['PCs'])):
# temppcabands[:,i]=temppcabands[:,i]+originpcabands[file][:,batch['PCs'][i]-1]
pcabands=np.copy(displaypclabels)
# pcabands=pcabands.reshape((originheight,originwidth))
# pcabands=pcabands.reshape(displayfea_l,displayfea_w)
colorindices_cal(file)
colorindicekeys=list(colorindicearray[file].keys())
colorindicelist=[ 0 for i in range(len(colorindicekeys)*3)]
datatable={}
origindata={}
for key in indicekeys:
data=originbandarray[file][key]
data=data.tolist()
tempdict={key:data}
origindata.update(tempdict)
print(key)
for key in colorindicekeys:
data=colorindicearray[file][key]
data=data.tolist()
tempdict={key:data}
origindata.update(tempdict)
print(key)
# for uni in colortable:
print(uniquelabels)
print('len uniquelabels',len(uniquelabels))
for uni in uniquelabels:
print(uni,colortable[uni])
uni=colortable[uni]
if uni=='Ref':
uniloc=np.where(labels==65535)
smalluniloc=np.where(originrestoredband==65535)
else:
uniloc=np.where(labels==float(uni))
smalluniloc=np.where(originrestoredband==uni)
if len(uniloc[0])==0 or len(uniloc[1])==0:
print('no uniloc\n')
print(uniloc[0],uniloc[1])
continue
ulx,uly=min(smalluniloc[1]),min(smalluniloc[0])
rlx,rly=max(smalluniloc[1]),max(smalluniloc[0])
width=rlx-ulx
length=rly-uly
print(width,length)
subarea=restoredband[uly:rly+1,ulx:rlx+1]
subarea=subarea.tolist()
amount=len(uniloc[0])
print(amount)
        try:
            sizes=currentsizes[uni]
        except KeyError:  # no size record was stored for this label
            print('no sizes\n')
            continue
#templist=[amount,length,width]
templist=[amount,sizes[0],sizes[1],sizes[2],sizes[3],sizes[4]]
# tempdict={colortable[uni]:templist+indeclist+colorindicelist+pcalist} #NIR,Redeyes,R,G,B,NDVI,area
        tempdict={uni:templist+indeclist+colorindicelist+pcalist} # NIR, Red Edge, R, G, B, NDVI, area
print(tempdict)
indicekeys=list(origindata.keys())
for ki in range(len(indicekeys)):
originNDVI=origindata[indicekeys[ki]]
print('originNDVI size',len(originNDVI),len(originNDVI[0]))
pixellist=[]
for k in range(len(uniloc[0])):
#print(uniloc[0][k],uniloc[1][k])
try:
# tempdict[colortable[uni]][6+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
tempdict[uni][6+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
# tempdict[colortable[uni]][7+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
tempdict[uni][7+ki*3]+=originNDVI[uniloc[0][k]][uniloc[1][k]]
pixellist.append(originNDVI[uniloc[0][k]][uniloc[1][k]])
# tempdict[colortable[uni]][ki*3+6]=tempdict[colortable[uni]][ki*3+6]/amount
# tempdict[colortable[uni]][ki*3+8]=np.std(pixellist)
tempdict[uni][ki*3+6]=tempdict[uni][ki*3+6]/amount
tempdict[uni][ki*3+8]=np.std(pixellist)
pixellist=[]
for k in range(len(uniloc[0])):
try:
# tempdict[colortable[uni]][-2]+=pcabands[uniloc[0][k]][uniloc[1][k]]
tempdict[uni][-2]+=pcabands[uniloc[0][k]][uniloc[1][k]]
except IndexError:
print(uniloc[0][k],uniloc[1][k])
# tempdict[colortable[uni]][-3]+=pcabands[uniloc[0][k]][uniloc[1][k]]
tempdict[uni][-3]+=pcabands[uniloc[0][k]][uniloc[1][k]]
pixellist.append(pcabands[uniloc[0][k]][uniloc[1][k]])
# tempdict[colortable[uni]][-3]=tempdict[colortable[uni]][-3]/amount
# tempdict[colortable[uni]][-1]=np.std(pixellist)
tempdict[uni][-3]=tempdict[uni][-3]/amount
tempdict[uni][-1]=np.std(pixellist)
datatable.update(tempdict)
filename=path+'/'+originfile+'-outputdata.csv'
with open(filename,mode='w') as f:
csvwriter=csv.writer(f)
rowcontent=['Index','Plot','Area(#pixel)','Length(#pixel)','Width(#pixel)','Area(mm2)','Length(mm)','Width(mm)']
for key in indicekeys:
rowcontent.append('avg-'+str(key))
rowcontent.append('sum-'+str(key))
rowcontent.append('std-'+str(key))
rowcontent.append('avg-PCA')
rowcontent.append('sum-PCA')
rowcontent.append('std-PCA')
#csvwriter.writerow(['ID','NIR','Red Edge','Red','Green','Blue','NIRv.s.Green','LabOstu','area(#of pixel)'])
#csvwriter.writerow(['Index','Plot','Area(#pixels)','avg-NDVI','sum-NDVI','std-NDVI','Length(#pixel)','Width(#pixel)'])#,'#holes'])
csvwriter.writerow(rowcontent)
i=1
for uni in datatable:
row=[i,uni]
for j in range(len(datatable[uni])):
row.append(datatable[uni][j])
#row=[i,uni,datatable[uni][0],datatable[uni][1],datatable[uni][2],datatable[uni][5],datatable[uni][3],datatable[uni][4]]#,
#datatable[uni][5]]
i+=1
print(row)
csvwriter.writerow(row)
print('total data length=',len(datatable))
# messagebox.showinfo('Saved',message='Results are saved to '+path)
tx=root.winfo_x()
ty=root.winfo_y()
top=Toplevel()
top.attributes("-topmost",True)
w = 300
h = 150
dx=100
dy=100
top.geometry("%dx%d+%d+%d" % (w, h, tx + dx, ty + dy))
top.title('Saved')
Message(top,text='Results are saved to '+path,padx=20,pady=20).pack()
okbut=Button(top,text='Okay',command=top.destroy)
okbut.pack(side=BOTTOM)
top.after(10000,top.destroy)
thresholds=[cal_xvalue(linelocs[0]),cal_xvalue(linelocs[1])]
minthres=min(thresholds)
maxthres=max(thresholds)
lwthresholds=[cal_yvalue(linelocs[2]),cal_yvalue(linelocs[3])]
maxlw=max(lwthresholds)
minlw=min(lwthresholds)
batch['Area_max']=[maxthres]
batch['Area_min']=[minthres]
batch['shape_max']=[maxlw]
batch['shape_min']=[minlw]
print('batch',batch)
batchfile=path+'/'+originfile+'-batch'+'.txt'
with open(batchfile,'w') as f:
for key in batch.keys():
f.write(key)
f.write(',')
for i in range(len(batch[key])):
f.write(str(batch[key][i]))
f.write(',')
f.write('\n')
f.close()
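# resegment(): re-runs segmentation on the current label map using the area and
# length+width thresholds read from the draggable lines on the scatter plot
# (linelocs). The reference region (sentinel label 65535), if present, is
# either masked out or folded into the thresholds so it is never filtered away.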
def resegment(thresholds=[],lwthresholds=[]):
global loccanvas,maxx,minx,maxy,miny,linelocs,bins,ybins,reseglabels,figcanvas,refvar,refsubframe,panelA
global labelplotmap,figdotlist,multi_results
global batch
global outputimgdict,outputimgbands
figcanvas.unbind('<Any-Enter>')
figcanvas.unbind('<Any-Leave>')
figcanvas.unbind('<Button-1>')
figcanvas.unbind('<B1-Motion>')
figcanvas.unbind('<Shift-Button-1>')
figcanvas.delete(ALL)
#panelA.unbind('<Button-1>')
#refvar.set('0')
#for widget in refsubframe.winfo_children():
# widget.config(state=DISABLED)
if len(thresholds)==0:
thresholds=[cal_xvalue(linelocs[0]),cal_xvalue(linelocs[1])]
minthres=min(thresholds)
maxthres=max(thresholds)
if len(lwthresholds)==0:
lwthresholds=[cal_yvalue(linelocs[2]),cal_yvalue(linelocs[3])]
maxlw=max(lwthresholds)
minlw=min(lwthresholds)
print(minthres,maxthres)
#labels=np.copy(reseglabels)
labels=np.copy(reseglabels)
#if reseglabels is None:
# reseglabels,border,colortable,labeldict=tkintercorestat.resegmentinput(labels,minthres,maxthres,minlw,maxlw)
if refarea is not None:
labels[refarea]=0
# if segmentratio>1:
# workingimg=cv2.resize(labels,(int(labels.shape[1]/segmentratio),int(labels.shape[0]/segmentratio)),interpolation=cv2.INTER_LINEAR)
# else:
# workingimg=np.copy(labels)
if refarea is None:
retrivearea=np.where(labels==65535)
if len(retrivearea[1])>0:
ulx,uly=min(retrivearea[1]),min(retrivearea[0])
rlx,rly=max(retrivearea[1]),max(retrivearea[0])
rtl=rly-uly
rtw=rlx-ulx
rtd=(rtl**2+rtw**2)**0.5
rtarea=len(retrivearea[0])
print('rtarea,rtl,rtw,rtd',rtarea,rtl,rtw,rtd)
if rtarea>maxthres:
maxthres=rtarea
if rtd>maxlw:
maxlw=rtd
if rtarea<minthres:
minthres=rtarea
if rtd<minlw:
minlw=rtd
reseglabels,border,colortable,labeldict=tkintercorestat.resegmentinput(labels,minthres,maxthres,minlw,maxlw)
# if segmentratio>1:
# cache=(np.zeros(labels.shape),{"f":int(segmentratio),"stride":int(segmentratio)})
# reseglabels=tkintercorestat.pool_backward(reseglabels,cache)
# #labeldict['iter0']['labels']=reseglabels
multi_results.update({currentfilename:(labeldict,{})})
iterkeys=list(labeldict.keys())
iternum=len(iterkeys)
print(labeldict)
#iternum=3
tempimgdict={}
tempimgbands={}
tempsmall={}
for key in labeldict:
tup=(labeldict[key]['labels'],labeldict[key]['counts'],labeldict[key]['colortable'],{},currentfilename)
outputdisplay,outputimg,small_seg=showcounting(tup,False,True,True)
tempimgdict.update({key:outputdisplay})
tempimgbands.update({key:outputimg})
tempsmall.update({key:small_seg})
outputimgdict.update({currentfilename:tempimgdict})
outputimgbands.update({currentfilename:tempimgbands})
outputsegbands.update({currentfilename:tempsmall})
changeoutputimg(currentfilename,'1')
'''
data=np.asarray(border[1:])
hist,bin_edges=np.histogram(data,density=False)
#figcanvas=Canvas(frame,width=400,height=350,bg='white')
#figcanvas.pack()
restoplot=createBins.createBins(hist.tolist(),bin_edges.tolist(),len(bin_edges))
minx,maxx=histograms.plot(restoplot,hist.tolist(),bin_edges.tolist(),figcanvas)
bins=bin_edges.tolist()
loccanvas=figcanvas
linelocs=[minx,maxx]
'''
    # displayfig()
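# cal_yvalue()/cal_xvalue(): invert the plot-to-canvas scaling used in
# displayfig(). The constants (300 and 250 px spans, 25/50 px offsets) mirror
# the hard-coded geometry of the 450x400 scatter canvas.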
def cal_yvalue(y):
y_scalefactor=250/(maxy-miny)
yval=(300+25-y)/y_scalefactor+miny
return yval
def cal_xvalue(x):
#print(maxx,minx,max(bins),min(bins))
#binwidth=(maxx-minx)/(max(bins)-min(bins))
#binwidth=(max(bins)-min(bins))/12
#print(x,minx,binwidth)
#xloc=((x-minx)/binwidth)
#print(xloc,min(bins))
#value=min(bins)+xloc*binwidth
#print(value)
print(x)
x_scalefactor=300/(maxx-minx)
print(x_scalefactor)
xval=(x-50-50)/x_scalefactor+minx
#print(x,xval)
return xval
def item_enter(event):
global figcanvas
figcanvas.config(cursor='hand2')
figcanvas._restorItem=None
figcanvas._restoreOpts=None
itemType=figcanvas.type(CURRENT)
#print(itemType)
pass
def item_leave(event):
global figcanvas
pass
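# item_multiselect(): Shift-click handler on the scatter plot. Maps the click
# to the nearest plotted dot and adds that object's label to multiselectitems.
# Note the distance used is the square root of the Manhattan distance, which
# ranks points the same way the plain Manhattan distance does.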
def item_multiselect(event):
global dotflash
print(event.type,'event item_multiselect')
currx=event.x
curry=event.y
print('mul_x',currx,'mul_y',curry)
if (currx,curry) in labelplotmap: #or (currx-1,curry) in labelplotmap or (currx+1,curry) in labelplotmap\
#or (currx,curry-1) in labelplotmap or (currx,curry+1) in labelplotmap:
labelkey=labelplotmap[(currx,curry)]
else:
plotlist=list(labelplotmap.keys())
distlist=[]
for i in range(len(plotlist)):
dist=(abs(currx-plotlist[i][0])+abs(curry-plotlist[i][1]))**0.5
distlist.append(dist)
shortestdist=min(distlist)
shortestdistindex=distlist.index(shortestdist)
labelkey=labelplotmap[plotlist[shortestdistindex]]
#if len(dotflash)>0:
# for i in range(len(dotflash)):
# figcanvas.delete(dotflash.pop(0))
dotx=plotlist[shortestdistindex][0]
doty=plotlist[shortestdistindex][1]
a=figcanvas.create_oval(dotx-1,doty-1,dotx+1,doty+1,width=1,outline='Orange',fill='Orange')
dotflash.append(a)
print(labelkey)
seedfigflash(labelkey,True)
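# item_start_drag(): Button-1 handler. Clicking a threshold line records which
# of the four lines is grabbed (dashed/solid red = area bounds in linelocs[0]/[1],
# dashed/solid blue = length+width bounds in linelocs[2]/[3]); clicking anywhere
# else selects the nearest scatter dot and flashes the matching object in the
# image panel.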
def item_start_drag(event):
global figcanvas,linelocs,dotflash
itemType=figcanvas.type(CURRENT)
print(itemType)
print(event.type,'event start_drag')
if itemType=='line':
fill=figcanvas.itemconfigure(CURRENT,'fill')[4]
dash=figcanvas.itemconfigure(CURRENT,'dash')[4]
print('dashlen',len(dash))
if fill=='red' and len(dash)>0:
figcanvas._lastX=event.x
#loccanvas._lastY=event.y
linelocs[0]=event.x
if fill=='red' and len(dash)==0:
figcanvas._lastX=event.x
#loccanvas._lastY=event.y
linelocs[1]=event.x
if fill=='blue' and len(dash)>0:
figcanvas._lastY=event.y
linelocs[2]=event.y
#print('blue')
if fill=='blue' and len(dash)==0:
figcanvas._lastY=event.y
linelocs[3]=event.y
#print('purple')
#if fill!='red' and fill!='orange':
# figcanvas._lastX=None
#if fill!='blue' and fill!='purple':
# figcanvas._lastY=None
print('linelocs',linelocs)
else:
currx=event.x
curry=event.y
print('x',currx,'y',curry)
if (currx,curry) in labelplotmap: #or (currx-1,curry) in labelplotmap or (currx+1,curry) in labelplotmap\
#or (currx,curry-1) in labelplotmap or (currx,curry+1) in labelplotmap:
labelkey=labelplotmap[(currx,curry)]
else:
plotlist=list(labelplotmap.keys())
distlist=[]
for i in range(len(plotlist)):
dist=(abs(currx-plotlist[i][0])+abs(curry-plotlist[i][1]))**0.5
distlist.append(dist)
shortestdist=min(distlist)
shortestdistindex=distlist.index(shortestdist)
labelkey=labelplotmap[plotlist[shortestdistindex]]
if len(dotflash)>0:
for i in range(len(dotflash)):
figcanvas.delete(dotflash.pop(0))
dotx=plotlist[shortestdistindex][0]
doty=plotlist[shortestdistindex][1]
a=figcanvas.create_oval(dotx-1,doty-1,dotx+1,doty+1,width=1,outline='Orange',fill='Orange')
dotflash.append(a)
print(labelkey)
if labelkey in reseglabels:
seedfigflash(labelkey)
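# item_drag(): B1-Motion handler. Clamps the drag to the plot area
# (x in [75,425], y in [50,350]), moves the grabbed threshold line, and keeps
# linelocs in sync so cal_xvalue()/cal_yvalue() reflect the new thresholds.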
def item_drag(event):
global figcanvas,linelocs,xvalue
x=event.x
y=event.y
if x<75:
x=75
if x>425:
x=425
if y<50:
y=50
if y>350:
y=350
    try:
        fill=figcanvas.itemconfigure(CURRENT,'fill')[4]
        dash=figcanvas.itemconfigure(CURRENT,'dash')[4]
        print('dashlen',len(dash))
        print(fill)
    except TclError:  # no canvas item under the cursor
        return
#itemType=loccanvas.type(CURRENT)
#try:
# test=0-loccanvas._lastX
# test=0-loccanvas._lastY
#except:
# return
if fill=='red': #or fill=='orange':
figcanvas.move(CURRENT,x-figcanvas._lastX,0)
if fill=='blue': #or fill=='purple':
figcanvas.move(CURRENT,0,y-figcanvas._lastY)
figcanvas._lastX=x
figcanvas._lastY=y
if fill=='red' and len(dash)>0:
linelocs[0]=x
if fill=='red' and len(dash)==0:
linelocs[1]=x
if fill=='blue' and len(dash)>0:
linelocs[2]=y
if fill=='blue' and len(dash)==0:
linelocs[3]=y
#print(line_a)
#print(minline)
#print(maxline)
print('linelocs',linelocs)
print(cal_xvalue(linelocs[0]),cal_xvalue(linelocs[1]),cal_yvalue(linelocs[2]),cal_yvalue(linelocs[3]))
pass
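# gen_convband(): rescales the segmentation label map (reseglabels) to the
# displayed image size so mouse coordinates on the panel map back to labels.
# Uses integer pooling (pool_forward to shrink, pool_backward to enlarge) from
# tkintercorestat rather than interpolation, which would blur label values.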
def gen_convband():
global convband
if reseglabels is None:
return
processlabel=np.copy(reseglabels)
displaysize=outputsegbands[currentfilename]['iter0'].size
print('reseglabels shape',reseglabels.shape)
print('displaysize',displaysize)
forward=0
if displaysize[0]*displaysize[1]<reseglabels.shape[0]*reseglabels.shape[1]:
ratio=int(max(reseglabels.shape[0]/displaysize[1],reseglabels.shape[1]/displaysize[0]))
forward=1
else:
ratio=int(max(displaysize[0]/reseglabels.shape[1],displaysize[1]/reseglabels.shape[0]))
forward=-1
#tempband=cv2.resize(processlabel.astype('float32'),(int(processlabel.shape[1]/ratio),int(processlabel.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
print(ratio)
if int(ratio)>1:
#if processlabel.shape[0]*processlabel.shape[1]<850*850:
if forward==-1:
print('pool_backward')
cache=(np.zeros((processlabel.shape[0]*ratio,processlabel.shape[1]*ratio)),{"f":int(ratio),"stride":int(ratio)})
convband=tkintercorestat.pool_backward(processlabel,cache)
else:
if forward==1:
print('pool_forward')
convband,cache=tkintercorestat.pool_forward(processlabel,{"f":int(ratio),"stride":int(ratio)})
else:
convband=processlabel
print('convband shape',convband.shape)
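# process(): entry point for the Segment button. Runs batch processing when
# batch mode is checked; otherwise performs a full extraction() on the first
# run or after the k-means setting changed, and a cheaper resegment() when only
# the threshold lines moved. Ends by regenerating the display-size label map.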
def process():
# global outputbutton
if proc_mode[proc_name].get()=='1':
batchprocess.batch_process()
# outputbutton.config(state=NORMAL)
return
# else:
# outputbutton.config(state=DISABLED)
if originlabels is None:
extraction()
else:
if changekmeans==True:
extraction()
else:
if linelocs[1]==425 and linelocs[3]==50:
extraction()
else:
resegment()
gen_convband()
#highlightcoin()
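# displayfig(): builds the interactive scatter plot. For every segmented label
# it collects pixel area plus bounding-box length/width, fits lm_method() to
# get a size residual, scales the (area, residual) pairs into the 450x400
# canvas, draws them with axistest.drawdots(), installs the drag/click
# bindings, and records the current PC-weight / k-means choices into batch.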
def displayfig():
global loccanvas,maxx,minx,maxy,miny,linelocs,bins,ybins,figcanvas
global labelplotmap,resviewframe
global figdotlist
data=[]
originlabeldict=multi_results[currentfilename][0]
colortable=originlabeldict['iter0']['colortable']
uniquelabels=list(colortable.keys())
lenwid=[]
lenlist=[]
widlist=[]
for widget in resviewframe.winfo_children():
widget.pack_forget()
figcanvas.pack()
figcanvas.delete(ALL)
labelplotmap={}
templabelplotmap={}
unitable=[]
for uni in uniquelabels:
if uni!=0:
uni=colortable[uni]
pixelloc = np.where(reseglabels == uni)
            try:
                ulx = min(pixelloc[1])
            except ValueError:  # label absent from reseglabels; skip it
                continue
uly = min(pixelloc[0])
rlx = max(pixelloc[1])
rly = max(pixelloc[0])
length=rly-uly
width=rlx-ulx
lenwid.append((length+width))
lenlist.append(length)
widlist.append(width)
data.append(len(pixelloc[0]))
unitable.append(uni)
# templabelplotmap.update({(len(pixelloc[0]),length+width):uni})
residual,area=lm_method.lm_method(lenlist,widlist,data)
lenwid=list(residual)
data=list(area)
for i in range(len(unitable)):
templabelplotmap.update({(data[i],lenwid[i]):unitable[i]})
miny=min(lenwid)
maxy=max(lenwid)
minx=min(data)
maxx=max(data)
binwidth=(maxx-minx)/10
ybinwidth=(maxy-miny)/10
bin_edges=[]
y_bins=[]
for i in range(0,11):
bin_edges.append(minx+i*binwidth)
for i in range(0,11):
y_bins.append(miny+i*ybinwidth)
#bin_edges.append(maxx)
#bin_edges.append(maxx+binwidth)
#y_bins.append(maxy)
#y_bins.append(maxy+ybinwidth)
plotdata=[]
for i in range(len(data)):
plotdata.append((data[i],lenwid[i]))
    scaledDatalist=[]
    # guard against a degenerate range before computing the scale factors,
    # otherwise the divisions below raise ZeroDivisionError
    if maxx-minx==0:
        maxx=minx+10
    if maxy-miny==0:
        maxy=miny+10
    x_scalefactor=300/(maxx-minx)
    y_scalefactor=250/(maxy-miny)
for (x,y) in plotdata:
xval=50+(x-minx)*x_scalefactor+50
yval=300-(y-miny)*y_scalefactor+25
scaledDatalist.append((int(xval),int(yval)))
for key in templabelplotmap:
x=key[0]
y=key[1]
xval=50+(x-minx)*x_scalefactor+50
yval=300-(y-miny)*y_scalefactor+25
unilabel=templabelplotmap[key]
labelplotmap.update({(int(xval),int(yval)):unilabel})
#print(labelplotmap)
#print(scaledDatalist)
figdotlist={}
axistest.drawdots(25+50,325+25,375+50,25+25,bin_edges,y_bins,scaledDatalist,figcanvas,figdotlist)
#loccanvas=figcanvas
#minx=25
#maxx=375
#maxy=325
#miny=25
#[25,375,325,25]
#linelocs=[25+12,375-12,25+12,325-12]
linelocs=[75+12,425-12,350-12,50+12]
#linelocs=[25+12,375-12,325-12,25+12]
bins=bin_edges
ybins=y_bins
figcanvas.bind('<Any-Enter>',item_enter)
figcanvas.bind('<Any-Leave>',item_leave)
figcanvas.bind('<Button-1>',item_start_drag)
figcanvas.bind('<B1-Motion>',item_drag)
figcanvas.bind('<Shift-Button-1>',item_multiselect)
#reseg=Button(frame,text='Re-process',command=partial(resegment,labels,figcanvas),padx=5,pady=5)
#reseg.pack()
#if outputbutton is None:
# outputbutton=Button(control_fr,text='Export Results',command=partial(export_result,'0'),padx=5,pady=5)
# outputbutton.pack()
#batchextraction()
#else:
# outputbutton.pack_forget()
# outputbutton.pack()
refbutton.config(state=NORMAL)
# refvar.set('0')
for widget in refsubframe.winfo_children():
#widget.config(state=DISABLED)
widget.config(state=NORMAL)
outputbutton.config(state=NORMAL)
#resegbutton.config(state=NORMAL)
# pcasel=[]
# pcakeys=list(pcaboxdict.keys())
# for i in range(len(pcakeys)):
# currvar=pcaboxdict[pcakeys[i]].get()
# if currvar=='1':
# pcasel.append(i+1)
kchoice=[]
kchoicekeys=list(checkboxdict.keys())
for i in range(len(kchoicekeys)):
currvar=checkboxdict[kchoicekeys[i]].get()
if currvar=='1':
kchoice.append(i+1)
pcasel=[]
pcasel.append(pc_combine_up.get()-0.5)
batch['PCweight']=pcasel.copy()
batch['PCsel']=[buttonvar.get()+1]
batch['Kmeans']=[int(kmeans.get())]
batch['Kmeans_sel']=kchoice.copy()
print(batch)
#def extraction(frame):
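# extraction(): full segmentation pass. Optionally trims image edges, crops to
# the user-selected ellipse (if any), downsamples large images so the working
# image stays tractable, then calls tkintercorestat.init() to label the k-means
# mask and renders each iteration through showcounting().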
def extraction():
global kernersizes,multi_results,workingimg,outputimgdict,outputimgbands,pixelmmratio
global currentlabels,panelA,reseglabels,refbutton,figcanvas,resegbutton,refvar
global refsubframe,loccanvas,originlabels,changekmeans,originlabeldict,refarea
global figdotlist,segmentratio
global batch
if int(kmeans.get())==1:
        messagebox.showerror('Invalid Class #',message='# of classes is 1; change it to 2 or more and refresh the color indices.')
return
refarea=None
multi_results.clear()
kernersizes.clear()
itervar=IntVar()
outputimgdict.clear()
outputimgbands.clear()
#for widget in frame.winfo_children():
# widget.pack_forget()
# coin=refvar.get()=='1'
edgevar=edge.get()=='1'
if edgevar:
currentlabels=removeedge(currentlabels)
nonzeros=np.count_nonzero(currentlabels)
nonzeroloc=np.where(currentlabels!=0)
    try:
        ulx,uly=min(nonzeroloc[1]),min(nonzeroloc[0])
    except ValueError:  # empty mask: color indices have not been processed yet
        messagebox.showerror('Invalid Color Indices',message='Need to process color indices first.')
        return
rlx,rly=max(nonzeroloc[1]),max(nonzeroloc[0])
nonzeroratio=float(nonzeros)/((rlx-ulx)*(rly-uly))
print('nonzeroratio=',nonzeroratio)
batch['nonzero']=[nonzeroratio]
#nonzeroratio=float(nonzeros)/(currentlabels.shape[0]*currentlabels.shape[1])
dealpixel=nonzeroratio*currentlabels.shape[0]*currentlabels.shape[1]
ratio=1
# if selarea.get()=='1':
selareadim=app.getinfo(rects[1])
global selareapos,originselarea
if selareadim!=[0,0,1,1] and selareadim!=[] and selareadim!=selareapos:
selareapos=selareadim
if selareapos!=[0,0,1,1] and originselarea==True:
# selareadim=app.getinfo(rects[1])
npfilter=np.zeros((displayimg['Origin']['Size'][0],displayimg['Origin']['Size'][1]))
filter=Image.fromarray(npfilter)
draw=ImageDraw.Draw(filter)
draw.ellipse(selareapos,fill='red')
filter=np.array(filter)
# start=list(selareapos)[:2]
# end=list(selareapos)[2:]
# lx,ly,rx,ry=int(min(start[0],end[0])),int(min(start[1],end[1])),int(max(start[0],end[0])),int(max(start[1],end[1]))
# filter[:,lx:rx+1]=1
# for i in range(0,ly):
# filter[i,:]=0
# for i in range(ry+1,displayimg['Origin']['Size'][0]):
# filter[i,:]=0
filter=np.divide(filter,np.max(filter))
originselarea=False
# filter=np.where(filter==max(filter),1,0)
else:
filter=np.ones((displayimg['Origin']['Size'][0],displayimg['Origin']['Size'][1]))
filter=cv2.resize(filter,(currentlabels.shape[1],currentlabels.shape[0]),interpolation=cv2.INTER_LINEAR)
selareapos=[]
print('deal pixel',dealpixel)
if dealpixel<512000:
workingimg=np.copy(currentlabels)
# if selarea.get()=='1':
workingimg=np.multiply(workingimg,filter)
else:
if nonzeroratio<=0.2:# and nonzeroratio>=0.1:
ratio=findratio([currentlabels.shape[0],currentlabels.shape[1]],[1600,1600])
print('ratio to wkimg',ratio)
# if dealpixel<512000 or currentlabels.shape[0]*currentlabels.shape[1]<=1600*1600:
# workingimg=np.copy(currentlabels)
# else:
# if currentlabels.shape[0]*currentlabels.shape[1]>1600*1600:
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/ratio),int(currentlabels.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
# if selarea.get()=='1':
filter=cv2.resize(filter,(int(currentlabels.shape[1]/ratio),int(currentlabels.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
workingimg=np.multiply(workingimg,filter)
# else:
# #ratio=1
# #print('nonzeroratio',ratio)
# workingimg=np.copy(currentlabels)
segmentratio=0
else:
# if dealpixel>512000:
if currentlabels.shape[0]*currentlabels.shape[1]>screenstd*screenstd:
segmentratio=findratio([currentlabels.shape[0],currentlabels.shape[1]],[screenstd,screenstd])
if segmentratio<2:
segmentratio=2
workingimg=cv2.resize(currentlabels,(int(currentlabels.shape[1]/segmentratio),int(currentlabels.shape[0]/segmentratio)),interpolation=cv2.INTER_LINEAR)
# if selarea.get()=='1':
filter=cv2.resize(filter,(int(currentlabels.shape[1]/segmentratio),int(currentlabels.shape[0]/segmentratio)),interpolation=cv2.INTER_LINEAR)
# filter=cv2.resize(filter,workingimg.shape[1],workingimg.shape[2],interpolation=cv2.INTER_LINEAR)
workingimg=np.multiply(workingimg,filter)
# else:
# segmentratio=1
# #print('ratio',ratio)
# workingimg=np.copy(currentlabels)
pixelmmratio=1.0
coin=False
print('nonzeroratio:',ratio,'segmentation ratio',segmentratio)
print('workingimgsize:',workingimg.shape)
pyplt.imsave('workingimg.png',workingimg)
if originlabels is None:
originlabels,border,colortable,originlabeldict=tkintercorestat.init(workingimg,workingimg,'',workingimg,10,coin)
changekmeans=False
else:
if changekmeans==True:
originlabels,border,colortable,originlabeldict=tkintercorestat.init(workingimg,workingimg,'',workingimg,10,coin)
changekmeans=False
# if segmentratio>1:
# cache=(np.zeros((currentlabels.shape[0],currentlabels.shape[1])),{"f":int(segmentratio),"stride":int(segmentratio)})
# orisize_originlabels=tkintercorestat.pool_backward(originlabels,cache)
# #originlabels=orisize_originlabels
# originlabeldict['iter0']['labels']=orisize_originlabels
multi_results.update({currentfilename:(originlabeldict,{})})
reseglabels=originlabels
labeldict=originlabeldict
colortable=originlabeldict['iter0']['colortable']
iterkeys=list(labeldict.keys())
iternum=len(iterkeys)
print(labeldict)
#iternum=3
itervar.set(len(iterkeys))
tempimgdict={}
tempimgbands={}
tempsmall={}
for key in labeldict:
tup=(labeldict[key]['labels'],labeldict[key]['counts'],labeldict[key]['colortable'],{},currentfilename)
outputdisplay,outputimg,smallset=showcounting(tup,False,True,True)
tempimgdict.update({key:outputdisplay})
tempimgbands.update({key:outputimg})
tempsmall.update({key:smallset})
outputimgdict.update({currentfilename:tempimgdict})
outputimgbands.update({currentfilename:tempimgbands})
outputsegbands.update({currentfilename:tempsmall})
#time.sleep(5)
#tup=(labeldict,coinparts,currentfilename)
#resscaler=Scale(frame,from_=1,to=iternum,tickinterval=1,length=220,orient=HORIZONTAL,variable=itervar,command=partial(changeoutputimg,currentfilename))
#resscaler.pack()
changeoutputimg(currentfilename,'1')
processlabel=np.copy(reseglabels)
tempband=np.copy(convband)
# panelA.bind('<Button-1>',lambda event,arg=processlabel:customcoin(event,processlabel,tempband))
# panelA.bind('<Shift-Button-1>',customcoin_multi)
panelA.config(cursor='hand2')
'''
data=np.asarray(border[1:])
hist,bin_edges=np.histogram(data,density=False)
figcanvas=Canvas(frame,width=400,height=350,bg='white')
figcanvas.pack()
restoplot=createBins.createBins(hist.tolist(),bin_edges.tolist(),len(bin_edges))
global minx,maxx,bins,loccanvas,linelocs
minx,maxx=histograms.plot(restoplot,hist.tolist(),bin_edges.tolist(),figcanvas)
bins=bin_edges.tolist()
loccanvas=figcanvas
linelocs=[minx,maxx]
'''
def onFrameConfigure(inputcanvas):
'''Reset the scroll region to encompass the inner frame'''
inputcanvas.configure(scrollregion=inputcanvas.bbox(ALL))
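# removeedge(): zeros a 20-pixel frame around the class mask (top, left, right,
# bottom) to drop objects touching the image border, then refreshes the
# ColorIndices display with the trimmed band.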
def removeedge(bands):
global pointcontainer,displayorigin
copyband=np.copy(bands)
size=copyband.shape
for i in range(20):
copyband[i,:]=0 #up
copyband[:,i]=0 #left
copyband[:,size[1]-1-i]=0 #right
copyband[size[0]-1-i,:]=0
img=ImageTk.PhotoImage(Image.fromarray(copyband.astype('uint8')))
displayimg['ColorIndices']['Image']=img
changedisplayimg(imageframe,'ColorIndices')
return copyband
def clustercontent(var):
global cluster,bandchoice,contentframe
bandchoice={}
#if var=='0':
#if var=='1':
cluster=['LabOstu','NDI','Greenness','VEG','CIVE','MExG','NDVI','NGRDI','HEIGHT','Band1','Band2','Band3']
for widget in contentframe.winfo_children():
widget.pack_forget()
for key in cluster:
tempdict={key:Variable()}
bandchoice.update(tempdict)
ch=ttk.Checkbutton(contentframe,text=key,variable=bandchoice[key])#,command=changecluster)#,command=partial(autosetclassnumber,clusternumberentry,bandchoice))
#if filedropvar.get()=='seedsample.JPG':
# if key=='NDI':
# ch.invoke()
ch.pack(fill=X)
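# findtempbandgap(): diagnostic helper. Sorts the x and y coordinates of a
# label's pixels and prints any runs that break 1-pixel continuity, which
# reveals whether the label is split into disconnected fragments.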
def findtempbandgap(locs):
xloc=list(locs[1])
yloc=list(locs[0])
sortedx=sorted(xloc)
gaps={}
last=0
for i in range(len(sortedx)):
if sortedx[i]==sortedx[last]:
continue
isone = sortedx[i]-sortedx[last]==1
if isone == False:
gaps.update({(last,i-1):i-1-last+1})
last=i
print('xgaps',gaps,'len',len(sortedx))
gaps={}
last=0
sortedy=sorted(yloc)
for i in range(len(sortedy)):
if sortedy[i]==sortedy[last]:
continue
isone = sortedy[i]-sortedy[last]==1
if isone == False:
gaps.update({(last,i-1):i-1-last+1})
last=i
print('ygaps',gaps,'len',len(sortedy))
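# customcoin_multi(): Shift-click handler on the image panel. Looks up the
# clicked label in the display-size band (convband), outlines it in yellow,
# appends it to multiselectitems, and highlights its dot on the scatter plot.
# Clicking background (label 0) clears the multi-selection.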
def customcoin_multi(event):
global panelA,multiselectitems
global coinbox_list,minflash,coinbox
global dotflash,figcanvas
x=event.x
y=event.y
# multiselectitems=[]
if len(minflash)>0:
for i in range(len(minflash)):
panelA.delete(minflash.pop(0))
if len(dotflash)>0:
for i in range(len(dotflash)):
figcanvas.delete(dotflash.pop(0))
panelA.delete(coinbox)
tempband=np.copy(convband)
print(tempband.shape)
coinlabel=tempband[y,x]
print('coinlabel',coinlabel,'x',x,'y',y)
if coinlabel==0:
multiselectitems=[]
if len(coinbox_list)>0:
for i in range(len(coinbox_list)):
panelA.delete(coinbox_list.pop(0))
return
else:
multiselectitems.append(coinlabel)
coinarea=np.where(tempband==coinlabel)
unix=np.unique(coinarea[1]).tolist()
uniy=np.unique(coinarea[0]).tolist()
if len(unix)==1:
ulx,rlx=unix[0],unix[0]
else:
ulx,rlx=min(coinarea[1]),max(coinarea[1])
if len(uniy)==1:
uly,rly=uniy[0],uniy[0]
else:
uly,rly=min(coinarea[0]),max(coinarea[0])
a=panelA.create_rectangle(ulx,uly,rlx+1,rly+1,outline='yellow')
coinbox_list.append(a)
# plotcoinarea=np.where(reseglabels==coinlabel)
# ulx,uly=min(plotcoinarea[1]),min(plotcoinarea[0])
# rlx,rly=max(plotcoinarea[1]),max(plotcoinarea[0])
# unix=np.unique(plotcoinarea[1]).tolist()
# uniy=np.unique(plotcoinarea[0]).tolist()
# if len(unix)==1:
# ulx,rlx=unix[0],unix[0]
# else:
# ulx,rlx=min(plotcoinarea[1]),max(plotcoinarea[1])
# if len(uniy)==1:
# uly,rly=uniy[0],uniy[0]
# else:
# uly,rly=min(plotcoinarea[0]),max(plotcoinarea[0])
# lw=rlx-ulx+rly-uly
# area=len(plotcoinarea[0])
# print('lw',lw,'area',area)
labelplotmapkeys=getkeys(labelplotmap)
for mapkey in labelplotmapkeys:
k=mapkey[0]
v=mapkey[1]
templabel=labelplotmap[mapkey]
if templabel in multiselectitems:
xval=k
yval=v
print('lw',yval,'area',xval)
plotflash(yval,xval,'Orange','Orange')
# break
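# customcoin(): Button-1 handler on the image panel. Resolves the clicked pixel
# to a label via convband, stores it as the current reference label (reflabel),
# draws a yellow box around its bounding rectangle, and flashes the matching
# scatter dot.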
def customcoin(event,processlabels,tempband):
global panelA#refarea,
global coinbox,reflabel,minflash,coinbox_list
global dotflash,figcanvas
global multiselectitems
x=event.x
y=event.y
multiselectitems=[]
if len(minflash)>0:
for i in range(len(minflash)):
panelA.delete(minflash.pop(0))
if len(dotflash)>0:
for i in range(len(dotflash)):
figcanvas.delete(dotflash.pop(0))
if len(coinbox_list)>0:
for i in range(len(coinbox_list)):
panelA.delete(coinbox_list.pop(0))
panelA.delete(coinbox)
tempband=np.copy(convband)
#ratio=findratio([processlabels.shape[0],processlabels.shape[1]],[850,850])
#tempband=cv2.resize(processlabels.astype('float32'),(int(processlabels.shape[1]/ratio),int(processlabels.shape[0]/ratio)),interpolation=cv2.INTER_LINEAR)
#if processlabels.shape[0]*processlabels.shape[1]>850*850
# tempband=
#tempband=tempband.astype('uint8')
print(tempband.shape)
coinlabel=tempband[y,x]
print('coinlabel',coinlabel,'x',x,'y',y)
#refarea=None
if coinlabel==0:
#messagebox.showerror('Invalid',message='Please pick areas have items.')
return
else:
#refarea=np.where(processlabels==coinlabel)
reflabel=coinlabel
coinarea=np.where(tempband==coinlabel)
#findtempbandgap(coinarea)
ulx,uly=min(coinarea[1]),min(coinarea[0])
rlx,rly=max(coinarea[1]),max(coinarea[0])
#copytempband=np.copy(tempband)
#temparea=copytempband[uly:rly+1,ulx:rlx+1]
#copytempband[uly:rly+1,ulx:rlx+1]=tkintercorestat.tempbanddenoice(temparea,coinlabel,len(refarea[0])/(ratio**2))
#coinarea=np.where(copytempband==coinlabel)
unix=np.unique(coinarea[1]).tolist()
uniy=np.unique(coinarea[0]).tolist()
if len(unix)==1:
ulx,rlx=unix[0],unix[0]
else:
ulx,rlx=min(coinarea[1]),max(coinarea[1])
if len(uniy)==1:
uly,rly=uniy[0],uniy[0]
else:
uly,rly=min(coinarea[0]),max(coinarea[0])
'''
try:
ulx,uly=min(coinarea[1]),min(coinarea[0])
rlx,rly=max(coinarea[1]),max(coinarea[0])
except:
coinarea=np.where(tempband==coinlabel)
ulx,uly=min(coinarea[1]),min(coinarea[0])
rlx,rly=max(coinarea[1]),max(coinarea[0])
'''
coinbox=panelA.create_rectangle(ulx,uly,rlx+1,rly+1,outline='yellow')
# plotcoinarea=np.where(reseglabels==coinlabel)
# ulx,uly=min(plotcoinarea[1]),min(plotcoinarea[0])
# rlx,rly=max(plotcoinarea[1]),max(plotcoinarea[0])
# unix=np.unique(plotcoinarea[1]).tolist()
# uniy=np.unique(plotcoinarea[0]).tolist()
# if len(unix)==1:
# ulx,rlx=unix[0],unix[0]
# else:
# ulx,rlx=min(plotcoinarea[1]),max(plotcoinarea[1])
# if len(uniy)==1:
# uly,rly=uniy[0],uniy[0]
# else:
# uly,rly=min(plotcoinarea[0]),max(plotcoinarea[0])
# lw=rlx-ulx+rly-uly
# area=len(plotcoinarea[0])
    for (k,v),templabel in labelplotmap.items():
        if templabel==reflabel:
            xval=k
            yval=v
            print('lw',yval,'area',xval)
            plotflash(yval,xval,'Orange','Orange')
            break
#panelA.unbind('<Button-1>')
def magnify(event):
    global panelA,magnifier_img
    x=event.x
    y=event.y
    grabimg=ImageGrab.grab((x-2,y-2,x+2,y+2))
    subimg=grabimg.resize((10,10))
    # keep a module-level reference to the PhotoImage, otherwise Tk garbage-
    # collects it and the magnified patch never appears on the canvas
    magnifier_img=ImageTk.PhotoImage(subimg)
    magnifier=panelA.create_image(x-3,y-3,image=magnifier_img)
    panelA.update()
def runflash(ulx,uly,rlx,rly,color):
global minflash,panelA
print(ulx,uly,rlx,rly)
a=panelA.create_rectangle(ulx,uly,rlx+2,rly+2,outline=color)
minflash.append(a)
def plotflash(yval,xval,outlinecolor,fillcolor):
global dotflash,figcanvas
# x_scalefactor=300/(maxx-minx)
# y_scalefactor=250/(maxy-miny)
# xval=50+(area-minx)*x_scalefactor+50
# yval=300-(lw-miny)*y_scalefactor+25
a=figcanvas.create_oval(xval-1,yval-1,xval+1,yval+1,width=1,outline=outlinecolor,fill=fillcolor)
dotflash.append(a)
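# seedfigflash(): given a label picked from the scatter plot, draws its
# bounding box on the image panel and blinks it red/yellow via timed after()
# callbacks. Assumes the label exists in convband.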
def seedfigflash(topkey,multi=False):
global panelA,coinbox
global reflabel,minflash,multiselectitems
tempband=np.copy(convband)
if len(minflash)>0:
for i in range(len(minflash)):
panelA.delete(minflash.pop(0))
panelA.delete(coinbox)
if multi==False:
multiselectitems=[]
else:
multiselectitems.append(topkey)
reflabel=topkey
coinarea=np.where(tempband==topkey)
print(coinarea)
ulx,uly=min(coinarea[1]),min(coinarea[0])
rlx,rly=max(coinarea[1]),max(coinarea[0])
unix=np.unique(coinarea[1]).tolist()
uniy=np.unique(coinarea[0]).tolist()
if len(unix)==1:
ulx,rlx=unix[0],unix[0]
else:
ulx,rlx=min(coinarea[1]),max(coinarea[1])
if len(uniy)==1:
uly,rly=uniy[0],uniy[0]
else:
uly,rly=min(coinarea[0]),max(coinarea[0])
coinbox=panelA.create_rectangle(ulx,uly,rlx+2,rly+2,outline='yellow')
panelA.after(300,lambda :runflash(ulx,uly,rlx,rly,'red'))
panelA.after(600,lambda :runflash(ulx,uly,rlx,rly,'yellow'))
panelA.after(900,lambda :runflash(ulx,uly,rlx,rly,'red'))
panelA.after(1200,lambda :runflash(ulx,uly,rlx,rly,'yellow'))
panelA.after(1500,lambda :runflash(ulx,uly,rlx,rly,'red'))
panelA.after(1800,lambda :runflash(ulx,uly,rlx,rly,'yellow'))
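# del_reflabel(): deletes the current reference label, any multi-selected
# labels, every label whose area or size residual falls outside the threshold
# lines, and everything inside the user-drawn delete ellipse, then re-runs
# process() to rebuild the segmentation and display.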
def del_reflabel():
global reseglabels,panelA,loccanvas,linelocs,bins,ybins,figcanvas,maxx,minx,maxy,miny,refvar,refsubframe
global labelplotmap,multiselectitems,dotflash,minflash,coinbox_list,reflabel
processlabel=np.copy(reseglabels)
refarea=np.where(processlabel==reflabel)
print('reflabel to delete',reflabel)
reseglabels[refarea]=0
reflabel=0
delselarea=app.getinfo(delrects[1])
# if len(minflash)>0:
# print('delete minflash')
# for i in range(len(minflash)):
# panelA.delete(minflash.pop(0))
# if len(dotflash)>0:
# print('delete dotflash')
# for i in range(len(dotflash)):
# figcanvas.delete(dotflash.pop(0))
# if len(coinbox_list)>0:
# print('del coinbox_list')
# for i in range(len(coinbox_list)):
# panelA.delete(coinbox_list.pop(0))
if len(multiselectitems)>0:
print('del multiselection items',len(multiselectitems))
for i in range(len(multiselectitems)):
refarea=np.where(processlabel==multiselectitems.pop(0))
reseglabels[refarea]=0
thresholds=[cal_xvalue(linelocs[0]),cal_xvalue(linelocs[1])]
minthres=min(thresholds)
maxthres=max(thresholds)
lwthresholds=[cal_yvalue(linelocs[2]),cal_yvalue(linelocs[3])]
maxlw=max(lwthresholds)
minlw=min(lwthresholds)
unique,counts=np.unique(processlabel,return_counts=True)
unique=unique[1:]
counts=counts[1:]
hist=dict(zip(unique,counts))
outsizethreshold=[]
for key in hist:
if hist[key]>maxthres:
outsizethreshold.append(key)
if hist[key]<minthres:
outsizethreshold.append(key)
    lenlist=[]
    widlist=[]
    data=[]
    unitable=[]  # labels actually measured, kept aligned with lenlist/widlist/data
    for uni in unique:
        if uni!=0:
            pixelloc = np.where(reseglabels == uni)
            try:
                ulx = min(pixelloc[1])
            except ValueError:  # label was deleted above, no pixels left
                continue
            uly = min(pixelloc[0])
            rlx = max(pixelloc[1])
            rly = max(pixelloc[0])
            length=rly-uly
            width=rlx-ulx
            lenlist.append(length)
            widlist.append(width)
            data.append(len(pixelloc[0]))
            unitable.append(uni)
    residual,area=lm_method.lm_method(lenlist,widlist,data)
    residual=list(residual)
    for i in range(len(residual)):
        # index unitable, not unique[1:]: unique already had the background
        # stripped above, and any label skipped in the loop would shift the indexing
        if residual[i]>maxlw:
            outsizethreshold.append(unitable[i])
        if residual[i]<minlw:
            outsizethreshold.append(unitable[i])
if len(outsizethreshold)>0:
print('del outsizethreshold',len(outsizethreshold))
for i in range(len(outsizethreshold)):
deletlabel=outsizethreshold[i]
refarea=np.where(processlabel==deletlabel)
reseglabels[refarea]=0
if delselarea!=[]:
print('delselarea',delrects[1],delselarea)
npfilter=np.zeros((displayimg['Origin']['Size'][0],displayimg['Origin']['Size'][1]))
filter=Image.fromarray(npfilter)
draw=ImageDraw.Draw(filter)
draw.ellipse(delselarea,fill='red')
# filter.save('deletefilter.tiff')
filter=np.array(filter)
filter=np.divide(filter,np.max(filter))
filter=cv2.resize(filter,(reseglabels.shape[1],reseglabels.shape[0]),interpolation=cv2.INTER_LINEAR)
indices_one=np.where(filter==1)
reseglabels[indices_one]=0
process()
# gen_convband()
# panelA.delete(coinbox)
# reseglabels=tkintercorestat.renamelabels(reseglabels)
# resegment([maxx,minx],[maxy,miny])
# displayfig()
# newcolortables=tkintercorestat.get_colortable(reseglabels)
# newunique,newcounts=np.unique(reseglabels,return_counts=True)
# tup=(reseglabels,newcounts,newcolortables,{},currentfilename)
# outputdisplay,outputimg,smallset=showcounting(tup,False)
# tempimgdict={}
# tempimgbands={}
# tempsmall={}
# tempimgdict.update({'iter0':outputdisplay})
# tempimgbands.update({'iter0':outputimg})
# tempsmall.update({'iter0':smallset})
# outputimgdict.update({currentfilename:tempimgdict})
# outputimgbands.update({currentfilename:tempimgbands})
# outputsegbands.update({currentfilename:tempsmall})
# changeoutputimg(currentfilename,'1')
# #update plot
# print('done image')
# copyplotmap=labelplotmap.copy()
# for k,v in copyplotmap.items():
# if v==reflabel:
# figindex=figdotlist[k]
# figcanvas.delete(figindex)
# if len(multiselectitems)>0:
# for k,v in copyplotmap.items():
# if v in multiselectitems and v!=reflabel:
# figindex=figdotlist[k]
# figcanvas.delete(figindex)
# if len(dotflash)>0:
# for i in range(len(dotflash)):
# figcanvas.delete(dotflash.pop(0))
# #tup=list(figcanvas.find_all())
# #figcanvas.delete(tup[-1])
# multiselectitems=[]
# if len(outsizethreshold)>0:
# for k,v in copyplotmap.items():
# if v in outsizethreshold and v!=reflabel:
# figindex=figdotlist[k]
# figcanvas.delete(figindex)
# outsizethreshold=[]
# displayfig()
# labels=np.copy(reseglabels)
# reseglabels,border,colortable,labeldict=tkintercorestat.resegmentinput(labels,minthres,maxthres,minlw,maxlw)
# displayfig()
# update plot
# def selareachoice(widget):
# # global panelA,rects,selareapos,app
# global rects,selareapos,app
# app=sel_area.Application(widget)
# rects=app.start()
# # if selarea.get()=='1':
# # messagebox.showinfo('select AOI',message='Clike mouse at start point and drag on the image to define an area you want to segment.')
# # rects=app.start()
# # else:
# # selareapos=app.getinfo(rects[1])
# # app.end(rects)
#def refchoice(refsubframe):
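# refchoice(): toggles reference mode. When enabled, the currently selected
# label becomes the physical-size reference region (used with the mm^2 value
# entered next to the Ref checkbox); when disabled, the reference region is
# restored under the sentinel label 65535.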
def refchoice():
#global coinsize,sizeentry,coinbox,panelA,boundaryarea,coindict,convband
global sizeentry,coinbox,panelA,boundaryarea,coindict,convband
global refarea,reseglabels
#refsubframe.grid_forget()
#for widget in refsubframe.winfo_children():
# widget.pack_forget()
#panelA.delete(coinbox)
if refvar.get()=='1':
        if currentlabels is None:
            messagebox.showerror('Invalid Option',message='Generate a color-index image with 2 or more classes first.')
            return
processlabel=np.copy(reseglabels)
refarea=np.where(processlabel==reflabel)
print('refarea',len(refarea[0]))
print('reflabel',reflabel)
else:
reseglabels[refarea]=65535
refarea=None
def changekmeansbar(event):
global kmeanschanged
kmeanschanged=True
def changepcweight(event):
global pcweightchanged,kmeanschanged
# print('pca weight',pc_combine_up.get())
pcweightchanged=True
if kmeans.get()>1:
kmeanschanged=True
def changeclusterbox(event):
global clusterchanged,changekmeans
clusterchanged=True
changekmeans=True
def beforecluster(event):
global kmeanschanged,pcweightchanged,imageframe
if pcweightchanged==True:
pcweightchanged=False
pcweightupdate(imageframe)
if kmeanschanged==True:
kmeanschanged=False
changecluster('')
## ----Interface----
## ----Display----
display_fr=Frame(root,width=640,height=640)
control_fr=Frame(root,width=320,height=320)
bottomframe=Frame(root)
bottomframe.pack(side=BOTTOM)
display_fr.pack(side=LEFT)
control_fr.pack(side=LEFT)
#display_label=Text(display_fr,height=1,width=100)
#display_label.tag_config("just",justify=CENTER)
#display_label.insert(END,'Display Panel',"just")
#display_label.configure(state=DISABLED)
#display_label.pack(padx=10,pady=10)
imgtypevar.set('0')
# Open_File('seedsample.JPG')
# singleband('seedsample.JPG')
#cal indices
generatedisplayimg('seedsample.JPG')
imageframe=LabelFrame(display_fr,bd=0)
imageframe.pack()
#panelA=Label(imageframe,text='Display Panel',image=displayimg['Origin']) #620 x 620
l=displayimg['Origin']['Size'][0]
w=displayimg['Origin']['Size'][1]
panelA=Canvas(imageframe,width=w,height=l,bg='white')
panelA.create_image(0,0,image=displayimg['Origin']['Image'],anchor=NW)
panelA.pack(padx=20,pady=20,expand=YES)
buttondisplay=LabelFrame(bottomframe,bd=0)
buttondisplay.config(cursor='hand2')
buttondisplay.pack(side=LEFT)
proc_name='batch_mode'
proc_mode={proc_name:Variable()}
proc_mode[proc_name].set('0')
proc_but=Checkbutton(buttondisplay,text=proc_name,variable=proc_mode[proc_name])
proc_but.pack(side=LEFT,padx=20,pady=5)
openfilebutton=Button(buttondisplay,text='Image',command=Open_Multifile,cursor='hand2')
openfilebutton.pack(side=LEFT,padx=20,pady=5)
mapbutton=Button(buttondisplay,text='Pilot',cursor='hand2',command=Open_Map)
mapbutton.pack(side=LEFT,padx=20,pady=5)
# disbuttonoption={'Origin':'1','PCs':'5','Color Deviation':'2','ColorIndices':'3','Output':'4'}
# buttonname={'Raw':'1','PCs':'5','Clusters':'2','Selected':'3','Output':'4'}
# #disbuttonoption={'Origin':'1','ColorIndices':'3','Output':'4'}
# for (text,v1),(name,v2) in zip(disbuttonoption.items(),buttonname.items()):
# b=Radiobutton(buttondisplay,text=name,variable=displaybut_var,value=disbuttonoption[text],command=partial(changedisplayimg,imageframe,controlframe,text))
# b.pack(side=LEFT,padx=20,pady=5)
# b.configure(state=DISABLED)
# if disbuttonoption[text]=='1':
# b.invoke()
### ---open file----
## ----Control----
#control_label=Text(control_fr,height=1,width=50)
#control_label.tag_config("just",justify=CENTER)
#control_label.insert(END,'Control Panel',"just")
#control_label.configure(state=DISABLED)
#control_label.pack()
filter_fr=LabelFrame(control_fr,bd=0)
filter_fr.pack()
imgtypeframe=LabelFrame(filter_fr,text='Image type',bd=0)
#imgtypeframe.pack()
imgtypeoption=[('Crop plots','1'),('Grain kernel','0')]
for text,mode in imgtypeoption:
b=Radiobutton(imgtypeframe,text=text,variable=imgtypevar,value=mode,command=partial(clustercontent,mode))
#b.pack(side=LEFT,padx=6)
### ---change file---
changefileframe=LabelFrame(filter_fr,text='Change Files',cursor='hand2')
#changefileframe.pack()
# filedropvar.set(filenames[0])
# changefiledrop=OptionMenu(changefileframe,filedropvar,*filenames,command=partial(changeimage,imageframe))
# changefiledrop.pack()
### ---choose color indices---
# '''
# chframe=LabelFrame(filter_fr,text='Select indicies below',cursor='hand2',bd=0)
# chframe.pack()
# chcanvas=Canvas(chframe,width=200,height=110,scrollregion=(0,0,400,400))
# chcanvas.pack(side=LEFT)
# chscroller=Scrollbar(chframe,orient=VERTICAL)
# chscroller.pack(side=RIGHT,fill=Y,expand=True)
# chcanvas.config(yscrollcommand=chscroller.set)
# chscroller.config(command=chcanvas.yview)
# contentframe=LabelFrame(chcanvas)
# chcanvas.create_window((4,4),window=contentframe,anchor=NW)
# contentframe.bind("<Configure>",lambda event,arg=chcanvas:onFrameConfigure(arg))
#
# for key in cluster:
# tempdict={key:Variable()}
# bandchoice.update(tempdict)
# ch=ttk.Checkbutton(contentframe,text=key,variable=bandchoice[key])#,command=changecluster)#,command=partial(autosetclassnumber,clusternumberentry,bandchoice))
# if filedropvar.get()=='seedsample.JPG':
# if key=='LabOstu':
# ch.invoke()
# ch.pack(fill=X)
# '''
### ----Class NUM----
kmeansgenframe=LabelFrame(filter_fr,cursor='hand2',bd=0)
pcaframe=LabelFrame(kmeansgenframe,text=' By PCs',cursor='hand2',bd=0)
kmeansgenframe.pack()
pcaframe.pack()
# pcselframe=LabelFrame(kmeansgenframe)
# pcselframe.pack()
kmeanslabel=LabelFrame(kmeansgenframe,text='By Clusters',bd=0)
checkboxframe=LabelFrame(filter_fr,cursor='hand2',bd=0)#,text='Select classes',cursor='hand2')
kmeanslabel.pack()
pcaboxdict={}
pc1label=Label(pcaframe,text='PC1',bd=0)
pc1label.pack(side=LEFT)
pccombinebar_up=ttk.Scale(pcaframe,from_=0,to=1,length=350,orient=HORIZONTAL,variable=pc_combine_up,command=changepcweight)#,command=partial(pcweightupdate,'',imageframe))#,command=partial(print,pc_combine_up.get))
pc_combine_up.set(0.5)
pccombinebar_up.pack(side=LEFT)
pccombinebar_up.state(["disabled"])
pc2label=Label(pcaframe,text='PC2',bd=0)
pc2label.pack(side=LEFT)
# for i in range(10):
# dictkey=str(i+1)
# tempdict={dictkey:Variable()}
# if i==0:
# tempdict[dictkey].set('1')
# else:
# tempdict[dictkey].set('0')
# pcaboxdict.update(tempdict)
# ch=Checkbutton(pcselframe,text=dictkey,variable=pcaboxdict[dictkey])#,command=changepca)
# ch.configure(state=DISABLED)
# ch.pack(side=LEFT)
# pcaframe.config(state=DISABLED)
keys=pcaboxdict.keys()
oldpcachoice=[]
for key in keys:
oldpcachoice.append(pcaboxdict[key].get())
kmeans.set(1)
#kmeansbar=Scale(kmeanslabel,from_=1,to=10,tickinterval=1,length=270,showvalue=0,orient=HORIZONTAL,variable=kmeans,command=partial(generatecheckbox,checkboxframe))
kmeansbar=ttk.Scale(kmeanslabel,from_=1,to=10,length=350,orient=HORIZONTAL,variable=kmeans,cursor='hand2',command=partial(generatecheckbox,checkboxframe))
kmeansbar.pack()
# kmeansbar.bind('<ButtonRelease-1>',changecluster)
kmeansbar.state(["disabled"])
# pcaframe.bind('<Leave>',lambda event,arg=imageframe:pcweightupdate(arg))
kmeansgenframe.bind('<Leave>',beforecluster)
checkboxframe.pack()
checkboxframe.bind('<Leave>',generateimgplant)
for i in range(10):
dictkey=str(i+1)
tempdict={dictkey:Variable()}
# if i==0:
# tempdict[dictkey].set('1')
# else:
tempdict[dictkey].set('0')
checkboxdict.update(tempdict)
ch=Checkbutton(checkboxframe,text=dictkey,variable=checkboxdict[dictkey],command=partial(generateimgplant,''))
if i+1>int(kmeans.get()):
ch.config(state=DISABLED)
ch.pack(side=LEFT)
kmeanscanvasframe=LabelFrame(kmeansgenframe,bd='0')
kmeanscanvasframe.pack()
kmeanscanvas=Canvas(kmeanscanvasframe,width=350,height=10,bg='Black')
#reshapemodified_tif=np.zeros((displaybandarray[currentfilename]['LabOstu'].shape[0]*displaybandarray[currentfilename]['LabOstu'].shape[1],3))
#colordicesband=kmeansclassify(['LabOstu'],reshapemodified_tif)
#colordicesband=kmeansclassify([],reshapemodified_tif)
# colordicesband=kmeansclassify()
# generateimgplant(colordicesband)
# changedisplayimg(imageframe,'Origin')
# getPCs()
colorstrip=np.zeros((15,35*2,3),'uint8')
for i in range(2):
for j in range(0,35):
colorstrip[:,i*35+j]=colorbandtable[i,:]
#pyplt.imsave('colorstrip.jpeg',colorstrip)
kmeanscanvas.delete(ALL)
#colorimg=cv2.imread('colorstrip.jpeg',flags=cv2.IMREAD_ANYCOLOR)
colorimg=np.copy(colorstrip)
colorimg=ImageTk.PhotoImage(Image.fromarray(colorimg.astype('uint8')))
kmeanscanvas.create_image(0,0,image=colorimg,anchor=NW)
kmeanscanvas.pack()
#generatecheckbox(checkboxframe,2)
#refreshebutton=Button(filter_fr,text='Refresh ColorIndices',cursor='hand2',command=changecluster)
#refreshebutton.pack()
### --- ref and edge settings ---
#for text,mode in refoption:
# b=Radiobutton(refframe,text=text,variable=refvar,value=mode,command=partial(refchoice,refsubframe))
#b.pack(side=LEFT,padx=15)
# b.grid(row=0,column=column)
# column+=1
edgeframe=LabelFrame(filter_fr,text='Edge remove setting')
#edgeframe.pack()
edgeoption=[('Remove edge','1'),('Keep same','0')]
edge.set('0')
for text,mode in edgeoption:
b=Radiobutton(edgeframe,text=text,variable=edge,value=mode)
b.pack(side=LEFT,padx=6)
### ---start extraction---
#extractionframe=LabelFrame(control_fr,cursor='hand2',bd=0)
#extractionframe.pack(padx=5,pady=5)
resviewframe=LabelFrame(control_fr,cursor='hand2',bd=0)
figcanvas=Canvas(resviewframe,width=450,height=400,bg='white')
figcanvas.pack()
#figcanvas.grid(row=0,column=0)
resviewframe.pack()
#refframe=LabelFrame(control_fr,cursor='hand2',bd=0)
refframe=LabelFrame(bottomframe,cursor='hand2',bd=0)
refframe.pack(side=LEFT)
disbuttonoption={'Origin':'1','PCs':'5','Color Deviation':'2','ColorIndices':'3','Output':'4'}
buttonname={'Raw':'1','PCs':'5','Clusters':'2','Selected':'3','Output':'4'}
#disbuttonoption={'Origin':'1','ColorIndices':'3','Output':'4'}
for (text,v1),(name,v2) in zip(disbuttonoption.items(),buttonname.items()):
b=Radiobutton(buttondisplay,text=name,variable=displaybut_var,value=disbuttonoption[text],command=partial(changedisplayimg,imageframe,text))
b.pack(side=LEFT,padx=20,pady=5)
b.configure(state=DISABLED)
if disbuttonoption[text]=='1':
b.invoke()
# selareabutton=Checkbutton(buttondisplay,text='SelArea',variable=selarea,command=selareachoice)
# selarea.set('0')
# selareabutton.pack(side=LEFT)
# selareabutton.configure(state=DISABLED)
refoption=[('Use Ref','1'),('No Ref','0')]
refvar.set('0')
refsubframe=LabelFrame(refframe,bd=0)
column=0
#refoption=[('Max','1'),('Min','2'),('Spec','3')]
#for text,mode in refoption:
# b=Radiobutton(refsubframe,text=text,variable=coinsize,value=mode,command=highlightcoin)#,command=partial(highlightcoin,processlabels,coindict,miniarea))
# b.pack(side=LEFT,padx=5)
# if mode=='1':
# b.invoke()
refsubframe.pack(side=LEFT)
refbutton=Checkbutton(refsubframe,text='Ref',variable=refvar,command=refchoice)
#refbutton.config(state=DISABLED)
refbutton.pack(side=LEFT,padx=20,pady=5)
sizeentry=Entry(refsubframe,width=5)
sizeentry.insert(END,285)
sizeentry.pack(side=LEFT,padx=2)
sizeunit=Label(refsubframe,text='mm^2')
sizeunit.pack(side=LEFT)
delrefbutton=Button(refsubframe,text='Delete',command=del_reflabel)
delrefbutton.pack(side=LEFT,padx=40)
#delrefbutton.config(state=DISABLED)
#refbutton=Checkbutton(refsubframe,text='Ref',variable=refvar,command=partial(refchoice,refsubframe))
for widget in refsubframe.winfo_children():
widget.config(state=DISABLED)
#extractbutton=Button(refframe,text='Process',command=partial(extraction))
extractbutton=Button(refframe,text='Segment',command=process)
extractbutton.configure(activebackground='blue',state=DISABLED)
extractbutton.pack(side=LEFT,padx=20,pady=5)
outputbutton=Button(refframe,text='Export',command=partial(export_result,'0'))
outputbutton.pack(side=LEFT,padx=20,pady=5)
outputbutton.config(state=DISABLED)
#resegbutton=Button(extractionframe,text='Re-Segment',command=resegment)
#resegbutton.pack(side=LEFT)
#resegbutton.config(state=DISABLED)
changekmeans=False
colorstripdict={}
for i in range(1,11):
colorstrip=np.zeros((15,35*i,3),'uint8')
for j in range(i):
for k in range(35):
colorstrip[:,j*35+k]=colorbandtable[j,:]
#loadimg=cv2.imread('colorstrip'+str(i)+'.png')
photoimg=ImageTk.PhotoImage(Image.fromarray(colorstrip.astype('uint8')))
colorstripdict.update({'colorstrip'+str(i):photoimg})
root.mainloop()
```
#### File: 12HuYang/GridFree/tkintersinglecore.py
```python
import numpy
import csv
#import time
#from skimage.feature import corner_fast,corner_peaks,corner_harris,corner_shi_tomasi
global lastlinecount,misslabel
from scipy.stats import shapiro
from scipy import ndimage as ndi
from skimage.morphology import watershed
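# note: recent scikit-image releases moved watershed to skimage.segmentation
# and dropped peak_local_max's indices= argument; this code targets the older
# API it was written against.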
from skimage.feature import peak_local_max
import tkintercore
colortable={}
colormatch={}
caliavgarea=0
calimax=0
calimin=0
calisigma=0
greatareas=[]
class node:
def __init__(self,i,j):
self.i=i
self.j=j
self.label=0
self.check=False
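# boundarywatershed(): recursive boundary-peeling split. Strips the one-pixel
# boundary of a connected region, relabels what remains, and if that breaks the
# region into two or more parts, grows each part back over the peeled boundary
# pixels; otherwise it recurses (up to 20 peels) on the shrunken region.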
def boundarywatershed(area,segbondtimes,boundarytype): #area = 1's
if caliavgarea is not None and numpy.count_nonzero(area)<caliavgarea/2:
return area
x=[0,-1,-1,-1,0,1,1,1]
y=[1,1,0,-1,-1,-1,0,1]
areaboundary=tkintercore.get_boundary(area)
temparea=area-areaboundary
arealabels=tkintercore.labelgapnp(temparea)
unique, counts = numpy.unique(arealabels, return_counts=True)
if segbondtimes>=20:
return area
if(len(unique)>2):
res=arealabels+areaboundary
leftboundaryspots=numpy.where(areaboundary==1)
leftboundary_y=leftboundaryspots[0].tolist()
leftboundary_x=leftboundaryspots[1].tolist()
for uni in unique[1:]:
labelboundaryloc=tkintercore.get_boundaryloc(arealabels,uni)
for m in range(len(labelboundaryloc[0])):
for k in range(len(y)):
i = labelboundaryloc[0][m] + y[k]
j = labelboundaryloc[1][m] + x[k]
if i >= 0 and i < res.shape[0] and j >= 0 and j < res.shape[1]:
if res[i, j] == 1:
res[i,j]=uni
for n in range(len(leftboundary_y)):
if leftboundary_y[n]==i and leftboundary_x[n]==j:
leftboundary_y.pop(n)
leftboundary_x.pop(n)
break
res=numpy.asarray(res)-1
res=numpy.where(res<0,0,res)
return res
else:
newarea=boundarywatershed(temparea,segbondtimes+1,boundarytype)*2
res=newarea+areaboundary
leftboundaryspots=numpy.where(res==1)
leftboundary_y = leftboundaryspots[0].tolist()
leftboundary_x = leftboundaryspots[1].tolist()
unique=numpy.unique(newarea)
for uni in unique[1:]:
labelboundaryloc = tkintercore.get_boundaryloc(newarea, uni)
for m in range(len(labelboundaryloc[0])):
for k in range(len(y)):
i = labelboundaryloc[0][m] + y[k]
j = labelboundaryloc[1][m] + x[k]
if i >= 0 and i < res.shape[0] and j >= 0 and j < res.shape[1]:
if res[i, j] == 1:
res[i, j] = uni
for n in range(len(leftboundary_y)):
if leftboundary_y[n] == i and leftboundary_x[n] == j:
leftboundary_y.pop(n)
leftboundary_x.pop(n)
break
res=numpy.asarray(res)/2
res=numpy.where(res<1,0,res)
return res
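# manualboundarywatershed(): distance-transform watershed for a clump the
# boundary peeling could not split. Estimates how many kernels the clump holds
# from caliavgarea, then tunes the peak_local_max footprint up or down until
# the number of seed markers matches that estimate before running watershed.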
def manualboundarywatershed(area):
'''
if numpy.count_nonzero(area)<avgarea/2:
return area
x=[0,-1,-1,-1,0,1,1,1]
y=[1,1,0,-1,-1,-1,0,1]
leftboundaryspots=numpy.where(area==1)
pixelcount=1
label=1
for k in range(len(leftboundaryspots[0])):
i=leftboundaryspots[0][k]
j=leftboundaryspots[1][k]
area[i][j]=label
pixelcount+=1
if pixelcount==int(avgarea):
pixelcount=1
label+=1
unique,count=numpy.unique(area,return_counts=True)
for i in range(1,len(count)):
if count[i]<avgarea/2:
area=numpy.where(area==unique[i],unique[i-1],area)
'''
maskpara=0.5
possiblecount=int(numpy.count_nonzero(area)/caliavgarea)
distance=ndi.distance_transform_edt(area)
masklength=int((caliavgarea*maskpara)**0.5)-1
local_maxi=peak_local_max(distance,indices=False,footprint=numpy.ones((masklength,masklength)),labels=area)
markers=ndi.label(local_maxi)[0]
unique=numpy.unique(markers)
while(len(unique)-1>possiblecount):
maskpara+=0.1
masklength=int((caliavgarea*maskpara)**0.5)-1
local_maxi=peak_local_max(distance,indices=False,footprint=numpy.ones((masklength,masklength)),labels=area)
markers=ndi.label(local_maxi)[0]
unique=numpy.unique(markers)
while(len(unique)-1<possiblecount):
maskpara-=0.1
masklength=int((caliavgarea*maskpara)**0.5)-1
try:
local_maxi=peak_local_max(distance,indices=False,footprint=numpy.ones((masklength,masklength)),labels=area)
        except Exception:  # footprint became too small for peak_local_max; step the mask parameter back up
maskpara+=0.1
masklength=int((caliavgarea*maskpara)**0.5)-1
local_maxi=peak_local_max(distance,indices=False,footprint=numpy.ones((masklength,masklength)),labels=area)
markers=ndi.label(local_maxi)[0]
break
markers=ndi.label(local_maxi)[0]
unique=numpy.unique(markers)
localarea=watershed(-distance,markers,mask=area)
return localarea
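# manualdivide(): works through the labels queued in greatareas (regions judged
# too large), cuts each one with manualboundarywatershed(), and splices the new
# sub-labels back into the full label image under fresh label ids.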
def manualdivide(area,greatareas):
global exceptions
unique, counts = numpy.unique(area, return_counts=True)
hist=dict(zip(unique,counts))
del hist[0]
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
while len(greatareas)>0:
topkey=greatareas.pop(0)
locs=numpy.where(area==topkey)
ulx,uly=min(locs[1]),min(locs[0])
rlx,rly=max(locs[1]),max(locs[0])
subarea=area[uly:rly+1,ulx:rlx+1]
subarea=subarea.astype(float)
tempsubarea=subarea/topkey
newtempsubarea=numpy.where(tempsubarea!=1.,0,1).astype(int)
antitempsubarea=numpy.where((tempsubarea!=1.) & (tempsubarea!=0),subarea,0)
times=len(locs[0])/meanpixel
averagearea=len(locs[0])/times
newsubarea=manualboundarywatershed(newtempsubarea)
labelunique,labcounts=numpy.unique(newsubarea,return_counts=True)
labelunique=labelunique.tolist()
labcounts=labcounts.tolist()
if len(labelunique)>2:
newsubarea=newsubarea*topkey
newlabel=labelunique.pop(-1)
maxlabel=area.max()
add=1
while newlabel>1:
newsubarea=numpy.where(newsubarea==topkey*newlabel,maxlabel+add,newsubarea)
print('new label: '+str(maxlabel+add))
newlabelcount=len(numpy.where(newsubarea==maxlabel+add)[0].tolist())
print('add '+'label: '+str(maxlabel+add)+' count='+str(newlabelcount))
newlabel=labelunique.pop(-1)
add+=1
newsubarea=newsubarea+antitempsubarea.astype(int)
area[uly:rly+1,ulx:rlx+1]=newsubarea
#labels=relabel(labels)
unique, counts = numpy.unique(area, return_counts=True)
hist=dict(zip(unique,counts))
del hist[0]
print('hist length='+str(len(counts)-1))
print('max label='+str(area.max()))
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
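# Merge undersized labels into a neighbor: search outward from the label's
# bounding box in all four directions and combine with the first candidate
# that keeps the merged pixel count inside [calimin, calimax]; labels with
# no viable neighbor are recorded in the global tinyareas list.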
def combineloop(area,misslabel):
global tinyareas
localarea=numpy.asarray(area)
unique, counts = numpy.unique(localarea, return_counts=True)
hist=dict(zip(unique,counts))
del hist[0]
#print('hist length='+str(len(counts)-1))
#print('max label='+str(labels.max()))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
sortedkeys=list(sorted(hist,key=hist.get))
topkey=sortedkeys.pop(0)
tinyareas=[]
while misslabel<=0:# or gocombine==True:
#while hist[topkey]<max(avgarea*0.75,lowrange):
#topkey=sortedkeys.pop(0)
print('uprange='+str(uprange))
print('lowrange='+str(lowrange))
print('combine part')
i=topkey
print(i,hist[i])
if hist[i]<lowrange and i not in tinyareas:
#if hist[i]<meanpixel:
locs=numpy.where(localarea==i)
ulx,uly=min(locs[1]),min(locs[0])
rlx,rly=max(locs[1]),max(locs[0])
width=rlx-ulx
height=rly-uly
#windowsize=min(width,height)
#dividen=2
subarea=localarea[uly:rly+1,ulx:rlx+1]
tempsubarea=subarea/i
#four direction searches
stop=False
poscombines=[]
for j in range(1,11):
up_unique=[]
down_unique=[]
left_unique=[]
right_unique=[]
maxlabel={}
tempcombines=[]
if uly-j>=0 and stop==False and len(up_unique)<2:
uparray=localarea[uly-j:uly,ulx:rlx+1]
up_unique=numpy.unique(uparray)
for x in range(len(up_unique)):
if up_unique[x]>0:
tempdict={up_unique[x]:hist[up_unique[x]]}
maxlabel.update(tempdict)
if rly+j<localarea.shape[0] and stop==False and len(down_unique)<2:
downarray=localarea[rly+1:rly+j+1,ulx:rlx+1]
down_unique=numpy.unique(downarray)
for x in range(len(down_unique)):
if down_unique[x]>0:
tempdict={down_unique[x]:hist[down_unique[x]]}
maxlabel.update(tempdict)
if ulx-j>=0 and stop==False and len(left_unique)<2:
leftarray=localarea[uly:rly+1,ulx-j:ulx]
left_unique=numpy.unique(leftarray)
for x in range(len(left_unique)):
if left_unique[x]>0:
tempdict={left_unique[x]:hist[left_unique[x]]}
maxlabel.update(tempdict)
if ulx+j<localarea.shape[1] and stop==False and len(right_unique)<2:
rightarray=localarea[uly:rly+1,rlx+1:rlx+j+1]
right_unique=numpy.unique(rightarray)
for x in range(len(right_unique)):
if right_unique[x]>0:
tempdict={right_unique[x]:hist[right_unique[x]]}
maxlabel.update(tempdict)
print(up_unique,down_unique,left_unique,right_unique)
tempcombines.append(up_unique)
tempcombines.append(down_unique)
tempcombines.append(left_unique)
tempcombines.append(right_unique)
poscombines.append(tempcombines)
tinylist=[]
while(len(poscombines)>0 and stop==False):
top=poscombines.pop(0)
tinylist.append(top)
toplist=[]
for j in range(4):
toparray=top[j]
topunique=numpy.unique(toparray)
for ele in topunique:
toplist.append(ele)
toplist=numpy.array(toplist)
combunique,combcount=numpy.unique(toplist,return_counts=True)
toplist=dict(zip(combunique,combcount))
toplist=list(sorted(toplist,key=toplist.get,reverse=True))
while(len(toplist)>0):
top=toplist.pop(0)
if top!=0:
topcount=hist[top]
if hist[i]+topcount>lowrange and hist[i]+topcount<uprange:
localarea=tkintercore.combinecrops(localarea,subarea,i,top,ulx,uly,rlx,rly)
stop=True
if len(poscombines)==0 and stop==False: #combine to the closest one
tinyareas.append(topkey)
#misslabel+=1
unique, counts = numpy.unique(localarea, return_counts=True)
hist=dict(zip(unique,counts))
sortedkeys=list(sorted(hist,key=hist.get))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
#if stop==False and leftsigma>rightsigma:
# localarea=numpy.where(localarea==topkey,0,localarea)
topkey=sortedkeys.pop(0)
                print('hist length='+str(len(unique[1:])))
else:
if len(sortedkeys)>0:
topkey=sortedkeys.pop(0)
else:
misslabel+=1
return localarea
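# Counterpart of combineloop: visit labels from largest to smallest and
# watershed-split any label whose pixel count exceeds the calibrated upper
# bound; labels that fail to split are queued in the global greatareas
# list for manualdivide to handle.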
def divideloop(area):
global greatareas
unique, counts = numpy.unique(area, return_counts=True)
hist=dict(zip(unique,counts))
del hist[0]
#print('hist length='+str(len(counts)-1))
#print('max label='+str(labels.max()))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
if leftsigma>rightsigma:
minisigma=min(leftsigma,rightsigma)-0.5
else:
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
topkey=sortedkeys.pop(0)
greatareas=[]
while len(sortedkeys)>0:
print('divide loop topkey='+str(topkey),hist[topkey])
if topkey!=0 and hist[topkey]>uprange:
locs=numpy.where(area==topkey)
ulx,uly=min(locs[1]),min(locs[0])
rlx,rly=max(locs[1]),max(locs[0])
subarea=area[uly:rly+1,ulx:rlx+1]
tempsubarea=subarea/topkey
newtempsubarea=numpy.where(tempsubarea!=1.,0,1)
antitempsubarea=numpy.where((tempsubarea!=1.) & (tempsubarea!=0),subarea,0)
newsubarea=boundarywatershed(newtempsubarea,1,'inner')#,windowsize)
labelunique,labcounts=numpy.unique(newsubarea,return_counts=True)
labelunique=labelunique.tolist()
if len(labelunique)>2:
newsubarea=newsubarea*topkey
newlabel=labelunique.pop(-1)
maxlabel=area.max()
add=1
while newlabel>1:
newsubarea=numpy.where(newsubarea==topkey*newlabel,maxlabel+add,newsubarea)
print('new label: '+str(maxlabel+add))
newlabelcount=len(numpy.where(newsubarea==maxlabel+add)[0].tolist())
print('add '+'label: '+str(maxlabel+add)+' count='+str(newlabelcount))
newlabel=labelunique.pop(-1)
add+=1
newsubarea=newsubarea+antitempsubarea.astype(int)
area[uly:rly+1,ulx:rlx+1]=newsubarea
unique, counts = numpy.unique(area, return_counts=True)
hist=dict(zip(unique,counts))
del hist[0]
print('hist length='+str(len(counts)-1))
print('max label='+str(area.max()))
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
topkey=sortedkeys.pop(0)
else:
if hist[topkey]>uprange:
if topkey not in greatareas:
greatareas.append(topkey)
topkey=sortedkeys.pop(0)
else:
break
else:
topkey=sortedkeys.pop(0)
return area
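# Locate the reference coin: take the largest labeled blob as the coin and
# collect it together with any labels whose bounding boxes lie inside, or
# overlap, the coin's bounding box.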
def findcoin(area):
unique, counts = numpy.unique(area, return_counts=True)
maxpixel=max(counts[1:])
maxpixelind=list(counts[1:]).index(maxpixel)
maxpixellabel=unique[1:][maxpixelind]
coinlocs=numpy.where(area==maxpixellabel)
coinulx=min(coinlocs[1])
coinuly=min(coinlocs[0])
coinrlx=max(coinlocs[1])
coinrly=max(coinlocs[0])
coinparts={}
coinparts.update({maxpixellabel:coinlocs})
for uni in unique:
if uni!=maxpixellabel:
templocs=numpy.where(area==uni)
tempulx=min(templocs[1])
tempuly=min(templocs[0])
temprlx=max(templocs[1])
temprly=max(templocs[0])
#inside coin boundingbox
if tempulx>=coinulx and tempulx<=coinrlx and temprlx>=coinulx and temprlx<=coinrlx:
if tempuly>=coinuly and tempuly<=coinrly and temprly>=coinuly and temprly<=coinrly:
if uni not in coinparts:
coinparts.update({uni:templocs})
continue
if (tempulx>coinulx and tempulx<coinrlx) or (temprlx>coinulx and temprlx<coinrlx):
if (tempuly>coinuly and tempuly<coinrly) or (temprly>coinuly and temprly<coinrly):
if uni not in coinparts:
coinparts.update({uni:templocs})
continue
return coinparts
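# Main segmentation driver: compute an initial watershed labeling, optionally
# remove the coin, then alternate divide/combine passes until the per-label
# pixel counts pass a Shapiro-Wilk normality check or the iteration budget
# runs out.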
def processinput(input,ittimes=30,coin=True):
band=input
boundaryarea=boundarywatershed(band,1,'inner')
boundaryarea=boundaryarea.astype(int)
originmethod,misslabel,colortable=tkintercore.relabel(boundaryarea)
labels=numpy.where(boundaryarea<1,0,boundaryarea)
if coin:
coinparts=findcoin(labels)
coinkeys=coinparts.keys()
for part in coinkeys:
labels=numpy.where(labels==part,0,labels)
else:
coinparts={}
#labels=boundaryarea
unique, counts = numpy.unique(labels, return_counts=True)
hist=dict(zip(unique,counts))
divide=0
docombine=0
with open('countlist.csv','w') as f:
writer=csv.writer(f)
templist=counts[1:].tolist()
for item in templist:
tempitem=str(item)
writer.writerow([tempitem])
f.close()
#print(numpy.column_stack(counts[1:]))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stat,p=shapiro(countseed)
alpha=0.05
if p>alpha:
print('like gaussian')
else:
print('does not like gaussian')
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
allinexceptsions,godivide,gocombine=tkintercore.checkvalid(p,leftsigma,rightsigma)
#while allinexceptsions is False:
lastgreatarea=[]
lasttinyarea=[]
for it in range(ittimes):
if godivide==False and gocombine==False:
break
#while godivide==True or gocombine==True:
try:
del hist[0]
except KeyError:
#continue
pass
print('hist length='+str(len(counts)-1))
print('max label='+str(labels.max()))
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
with open('countseed'+str(it)+'.csv','w') as f:
csvwriter=csv.writer(f)
content=['index','pixels']
csvwriter.writerow(content)
for i in range(len(counts[1:])):
content=[str(i+1),str(counts[1:][i])]
csvwriter.writerow(content)
f.close()
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
sortedkeys=list(sorted(hist,key=hist.get,reverse=True))
#j=0
if godivide is True:
labels=divideloop(labels)
#unique=numpy.unique(labels).tolist()
#for i in range(len(unique)):
# labels=numpy.where(labels==unique[i],i,labels)
unique, counts = numpy.unique(labels, return_counts=True)
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
divide+=1
outputlabel,misslabel,colortable=tkintercore.relabel(labels)
if lastgreatarea==greatareas and len(lastgreatarea)!=0:
manualdivide(labels,greatareas)
#cornerdivide(labels,greatareas)
lastgreatarea[:]=greatareas[:]
stat,p=shapiro(countseed)
#allinexceptsions,godivide,gocombine=checkvalid(misslabel,hist,sortedkeys,uprange,lowrange,avgarea)
allinexceptsions,godivide,gocombine=tkintercore.checkvalid(p,leftsigma,rightsigma)
if gocombine is True:
labels=combineloop(labels,0)
#unique=numpy.unique(labels).tolist()
#for i in range(len(unique)):
# labels=numpy.where(labels==unique[i],i,labels)
unique, counts = numpy.unique(labels, return_counts=True)
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
minisigma=min(leftsigma,rightsigma)
#uprange=meanpixel+minisigma*stdpixel
#lowrange=meanpixel-minisigma*stdpixel
uprange=calimax
lowrange=calimin
docombine+=1
outputlabel,misslabel,colortable=tkintercore.relabel(labels)
unique, counts = numpy.unique(labels, return_counts=True)
meanpixel=sum(counts[1:])/len(counts[1:])
countseed=numpy.asarray(counts[1:])
stdpixel=numpy.std(countseed)
hist=dict(zip(unique,counts))
for ele in sorted(hist,key=hist.get):
if hist[ele]<lowrange:
print('tinyarea:',ele,hist[ele])
if hist[ele]>uprange:
print('greatarea:',ele,hist[ele])
leftsigma=(meanpixel-min(countseed))/stdpixel
rightsigma=(max(countseed)-meanpixel)/stdpixel
stat,p=shapiro(countseed)
#allinexceptsions,godivide,gocombine=checkvalid(misslabel,hist,sortedkeys,uprange,lowrange,avgarea)
allinexceptsions,godivide,gocombine=tkintercore.checkvalid(p,leftsigma,rightsigma)
print('DONE!!! counts='+str(len(counts)))
labels=tkintercore.renamelabels(labels)
colorlabels,misslabel,colortable=tkintercore.relabel(labels)
NDVIbounary=tkintercore.get_boundary(labels)
NDVIbounary=NDVIbounary*255
res=NDVIbounary
return labels,res,colortable,coinparts
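# Entry point: stash the calibration statistics (mean/max/min/sigma of the
# seed area) in module globals, erode the foreground while it occupies more
# than 10% of its bounding box, then hand the cleaned mask to processinput.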
def init(input,caliberation,ittimes,coin):
global caliavgarea,calimax,calimin,calisigma
caliavgarea=caliberation['mean']
calimax=caliberation['max']
calimin=caliberation['min']
calisigma=caliberation['sigma']
input=input.astype(int)
pixellocs=numpy.where(input!=0)
ulx,uly=min(pixellocs[1]),min(pixellocs[0])
rlx,rly=max(pixellocs[1]),max(pixellocs[0])
squarearea=(rlx-ulx)*(rly-uly)
occupiedratio=len(pixellocs[0])/squarearea
print(caliavgarea,occupiedratio)
if occupiedratio>0.1:
while(occupiedratio>0.1):
distance=ndi.distance_transform_edt(input)
input=numpy.where(distance==1.0,0,input)
pixellocs=numpy.where(input!=0)
ulx,uly=min(pixellocs[1]),min(pixellocs[0])
rlx,rly=max(pixellocs[1]),max(pixellocs[0])
squarearea=(rlx-ulx)*(rly-uly)
occupiedratio=len(pixellocs[0])/squarearea
print(caliavgarea,occupiedratio)
#lastlinecount=lastline
#if occupiedratio>=0.5:
labels,res,colortable,coinparts=processinput(input,ittimes,coin)
#else:
# labels,res,colortable,greatareas,tinyareas=kmeansprocess(pixellocs,input,counts)
return labels,res,colortable,coinparts
``` |
{
"source": "12HuYang/Rooster",
"score": 2
} |
#### File: 12HuYang/Rooster/rooster_batch.py
```python
import tkinter.filedialog as filedialog
from tkinter import messagebox
import os
from predictionModel import predictionCNN
from PIL import Image,ImageDraw,ImageFont
import numpy as np
import multiprocessing
import time
FOLDER=''
exportpath=''
batch_filenames=[]
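# Serial per-image pipeline: load an image, overlay a row x col grid, run the
# CNN prediction on each grid cell, and export an annotated image plus a
# per-cell CSV; process() returns a one-row summary used by the batch CSV.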
class batch_ser_func():
def __init__(self,filename,dlinput,inputconfidence):
self.file=filename
self.folder=FOLDER
self.exportpath=exportpath
self.dlinput=dlinput
self.confidence=None
self.confidthres=inputconfidence
self.RGBimg=Image.open(os.path.join(FOLDER,self.file))
self.rgbwidth,self.rgbheight=self.RGBimg.size
self.imgsize={}
self.imgsize.update({'row':self.rgbheight})
self.imgsize.update({'col':self.rgbwidth})
self.npimage=None
self.localdlinput=None
self.predres=None
def addbars(self,locs):
if self.localdlinput['model']=='':
return
x0=min(locs[1])
y0=min(locs[0])
x1=max(locs[1])
y1=max(locs[0])
draw=ImageDraw.Draw(self.RGBimg)
# endx=int(x0+(x1-x0)/2)
# endy=int(y0+(y1-y0)/2)
draw.line(((x0,y0),(x1,y0)),fill='red',width=5) #draw up red line
draw.line(((x0,y0),(x0,y1)),fill='red',width=5) #draw left red line
# self.show_image()
def export_single(self):
if self.localdlinput['model']=='':
rownum=self.localdlinput['row']
colnum=self.localdlinput['col']
gridnum=rownum*colnum
filenamepart=os.path.splitext(self.file)
outputname=filenamepart[0]+'_crop_'
for i in range(gridnum):
index=i+1
row=int(i/colnum)
col=i%colnum
locs=np.where(self.npimage==index)
x0=min(locs[1])
y0=min(locs[0])
x1=max(locs[1])
y1=max(locs[0])
cropimage=self.RGBimg.crop((x0,y0,x1,y1))
cropimage.save(self.exportpath+'/'+outputname+str(index)+'.png','PNG')
return
#draw gridimg
for i in range(len(self.predres)):
if self.predres[i]==1:
locs=np.where(self.npimage==(i+1))
self.addbars(locs)
filenamepart=os.path.splitext(self.file)
outputname=filenamepart[0]+'_gridimg.png'
totalhealthy=self.predres.count(0)
totalinfect=self.predres.count(1)
from PIL.ExifTags import TAGS,GPSTAGS
imginfo=self.RGBimg.getexif()
if len(imginfo)>0:
exif_table={}
for tag,value in imginfo.items():
decoded=TAGS.get(tag,tag)
exif_table[decoded]=value
print(exif_table.keys())
if 'GPSInfo' in exif_table.keys():
gps_info={}
if type(exif_table['GPSInfo'])==dict:
for key in exif_table['GPSInfo'].keys():
decoded=GPSTAGS.get(key,key)
gps_info[decoded]=exif_table['GPSInfo'][key]
GPS_Lat=list(gps_info['GPSLatitude'])
GPS_Long=list(gps_info['GPSLongitude'])
latitude=str(GPS_Lat[0][0])+'.'+str(GPS_Lat[1][0])+"'"+str(GPS_Lat[2][0])+"''"
# print()
longitude=str(GPS_Long[0][0])+'.'+str(GPS_Long[1][0])+"'"+str(GPS_Long[2][0])+"''"
else:
longitude=0
latitude=0
else:
longitude=0
latitude=0
# print
else:
longitude=0
latitude=0
avg_confid=np.mean(np.array(self.confidence))
std_confid=np.std(np.array(self.confidence))
max_confid=np.max(np.array(self.confidence))
min_confid=np.min(np.array(self.confidence))
summary=[self.file,totalhealthy,totalinfect,longitude,latitude,avg_confid,std_confid,max_confid,min_confid]
import csv
outputcsv=os.path.join(self.exportpath,outputname+'_output.csv')
headline=['index','row','col','label','prediction','confidence']
with open(outputcsv,mode='w') as f:
csvwriter=csv.writer(f,lineterminator='\n')
csvwriter.writerow(headline)
rownum=self.localdlinput['row']
colnum=self.localdlinput['col']
gridnum=rownum*colnum
# outputimg=labelimage.copy()
draw=ImageDraw.Draw(self.RGBimg)
for i in range(gridnum):
index=i+1
row=int(i/colnum)
col=i%colnum
locs=np.where(self.npimage==index)
x0=min(locs[1])
y0=min(locs[0])
x1=max(locs[1])
y1=max(locs[0])
# if int(imageexport.get())==1:
# cropimage=RGBimg.crop((x0,y0,x1,y1))
# cropimage.save(outpath+'/'+originfile+'_crop_'+str(index)+'.png','PNG')
midx=x0+5
midy=y0+5
state='crop-'+str(index)
draw.text((midx-1, midy+1), text=state, fill='white')
draw.text((midx+1, midy+1), text=state, fill='white')
draw.text((midx-1, midy-1), text=state, fill='white')
draw.text((midx+1, midy-1), text=state, fill='white')
draw.text((midx,midy),text=state,fill='black')
# if exportoption.get()=='P':
# label=predictlabels[i]
# if exportoption.get()=='C':
# label=infectedlist[i]
label=0
# if confidence!=None:
# pred_label= 1 if list(confidence)[i]>=float(slider.get()) else 0
# confidvalue=list(confidence)[i]
# content=[index,row,col,label,pred_label,confidvalue]
# else:
# content = [index, row, col, label,0,0]
confidvalue=self.confidence[i]
pred_label=self.predres[i]
content=[index,row,col,label,pred_label,confidvalue]
csvwriter.writerow(content)
print(index)
self.RGBimg.save(os.path.join(self.exportpath,outputname),'PNG')
del draw
f.close()
return summary
def prediction(self):
if self.localdlinput['model']=='':
return
self.confidence=predictionCNN(self.localdlinput)
temppred=[0 for i in range(len(self.confidence))]
satisfiedpred=np.where(np.array(self.confidence)>=self.confidthres)
temppred=np.array(temppred)
temppred[satisfiedpred]=1
self.predres=list(temppred.copy())
pass
def drawgrid(self):
if self.localdlinput['model']=='':
return
row_stepsize = int(self.rgbheight / self.localdlinput['row'])
col_stepsize = int(self.rgbwidth / self.localdlinput['col'])
draw = ImageDraw.Draw(self.RGBimg)
row_start = 0
row_end = self.rgbheight
col_start = 0
col_end = self.rgbwidth
for col in range(0, col_end, col_stepsize):
line = ((col, row_start), (col, row_end))
draw.line(line, fill='white', width=5)
for row in range(0, row_end, row_stepsize):
line = ((col_start, row), (col_end, row))
draw.line(line, fill='white', width=5)
del draw
pass
def updatenpimage(self):
gridnum=self.localdlinput['row']*self.localdlinput['col']
row_stepsize=int(self.rgbheight/self.localdlinput['row'])
col_stepsize=int(self.rgbwidth/self.localdlinput['col'])
print(gridnum,row_stepsize,col_stepsize)
self.npimage=np.zeros((self.rgbheight,self.rgbwidth))
for i in range(gridnum):
c=i%self.localdlinput['col']
r=int(i/self.localdlinput['col'])
print(r,c)
self.npimage[r*row_stepsize:(r+1)*row_stepsize,c*col_stepsize:(c+1)*col_stepsize]=i+1
print(self.npimage)
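    # Orient the grid to the image: sort the image dimensions so the larger
    # of the configured row/col counts lands on the longer image axis, then
    # draw the grid, number the cells, predict, and export the results.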
def process(self):
orient={k:v for k,v in sorted(self.imgsize.items(), key=lambda item: item[1],reverse=True)}
# orient=sorted(self.imgsize.items(),reverse=True)
print(orient)
orientkeys=[key for key in orient.keys()]
print(orientkeys)
self.localdlinput=self.dlinput.copy()
if self.localdlinput[orientkeys[0]]<self.localdlinput[orientkeys[1]]:
temp=self.localdlinput[orientkeys[0]]
            self.localdlinput[orientkeys[0]]=self.localdlinput[orientkeys[1]]
self.localdlinput[orientkeys[1]]=temp
self.drawgrid()
self.updatenpimage()
self.prediction()
summary=self.export_single()
return summary
def Open_batchfolder():
global batch_filenames
global FOLDER
batch_filenames=[]
FOLDER=filedialog.askdirectory()
if len(FOLDER)>0:
print(FOLDER)
files=os.listdir(FOLDER)
for filename in files:
if 'jpg' in filename or 'jpeg' in filename or 'JPG' in filename or 'tif' in filename:
batch_filenames.append(filename)
batch_filenames.sort()
print('filenames',batch_filenames)
return os.path.join(FOLDER,batch_filenames[0])
def batch_exportpath():
global exportpath
exportpath=filedialog.askdirectory()
while len(exportpath)==0:
exportpath=filedialog.askdirectory()
def batch_process(dlinput,inputconfidence):
if len(batch_filenames)==0:
messagebox.showerror('No files','Please load images to process')
return
cpunum=multiprocessing.cpu_count()
print('# of CPUs',cpunum)
starttime=time.time()
print('start time',starttime)
batch_summary=[]
head=['filename','healthy#','infected#','Longitude(E,W)','Latitude(N,S)','avg-confid','std-confid','max-confid','min-confid']
batch_summary.append(head)
for file in batch_filenames:
dlinput['imagepath']=os.path.join(FOLDER,file)
procobj=batch_ser_func(file,dlinput,inputconfidence)
filesummary=procobj.process()
batch_summary.append(filesummary)
del procobj
if dlinput['model']!='':
import csv
outputcsv=os.path.join(exportpath,'summary'+'_confidthres='+str(inputconfidence)+'_.csv')
with open(outputcsv,mode='w') as f:
csvwriter=csv.writer(f,lineterminator='\n')
if len(batch_summary)>0:
for ele in batch_summary:
csvwriter.writerow(ele)
f.close()
print('used time',time.time()-starttime)
messagebox.showinfo('Batch processing done','Batch process done!')
``` |
{
"source": "12hyhy12/cloud-recommender",
"score": 2
} |
#### File: restful/driver/detect.py
```python
import os,re
import time
import datetime
def execCmd(cmd):
r = os.popen(cmd)
text = r.read()
r.close()
return text
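# Poll `kubectl get pods` once per second and return the wall-clock time at
# which the named Spark driver pod reports Completed.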
def main(name):
    while True:
        cmd = "kubectl get pods"
        result = execCmd(cmd)
        result = result.split(os.linesep)
        for item in result:
            # str.find() returns -1 (truthy) when the substring is absent and
            # 0 (falsy) when it starts the line, so the original checks were
            # inverted; use substring membership instead
            if name + '-driver' in item and 'Completed' in item:
                return time.time()
        time.sleep(1)
if __name__ == '__main__':
main("spark-pi")
``` |
{
"source": "12kleingordon34/football",
"score": 2
} |
#### File: gfootball/env/observation_preprocessing.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from gfootball.env import football_action_set
import numpy as np
from six.moves import range
from scipy.signal import convolve2d
SMM_WIDTH = 96
SMM_HEIGHT = 72
SMM_LAYERS = ['left_team', 'right_team', 'ball', 'active']
# Normalized minimap coordinates
MINIMAP_NORM_X_MIN = -1.0
MINIMAP_NORM_X_MAX = 1.0
MINIMAP_NORM_Y_MIN = -1.0 / 2.25
MINIMAP_NORM_Y_MAX = 1.0 / 2.25
_MARKER_VALUE = 255
def get_smm_layers(config):
return SMM_LAYERS
def mark_points(frame, points):
"""Draw dots corresponding to 'points'.
Args:
frame: 2-d matrix representing one SMM channel ([y, x])
points: a list of (x, y) coordinates to be marked
"""
for p in range(len(points) // 2):
x = int((points[p * 2] - MINIMAP_NORM_X_MIN) /
(MINIMAP_NORM_X_MAX - MINIMAP_NORM_X_MIN) * frame.shape[1])
y = int((points[p * 2 + 1] - MINIMAP_NORM_Y_MIN) /
(MINIMAP_NORM_Y_MAX - MINIMAP_NORM_Y_MIN) * frame.shape[0])
x = max(0, min(frame.shape[1] - 1, x))
y = max(0, min(frame.shape[0] - 1, y))
frame[y, x] = _MARKER_VALUE
def generate_smm(observation, kernel, config=None,
channel_dimensions=(SMM_WIDTH, SMM_HEIGHT)):
"""Returns a list of minimap observations given the raw features for each
active player.
Args:
    observation: raw features from the environment
    kernel: 2-d kernel convolved with each marked channel to smooth it
config: environment config
channel_dimensions: resolution of SMM to generate
Returns:
(N, H, W, C) - shaped np array representing SMM. N stands for the number of
players we are controlling.
"""
frame = np.zeros((len(observation), channel_dimensions[1],
channel_dimensions[0], len(get_smm_layers(config))),
dtype=np.uint8)
for o_i, o in enumerate(observation):
for index, layer in enumerate(get_smm_layers(config)):
assert layer in o
if layer == 'active':
if o[layer] == -1:
continue
mark_points(frame[o_i, :, :, index],
np.array(o['left_team'][o[layer]]).reshape(-1))
else:
mark_points(frame[o_i, :, :, index], np.array(o[layer]).reshape(-1))
    frame[o_i,:,:,index] = convolve2d(frame[o_i,:,:,index], kernel, mode='same')  # smooth the marked channel; mode='same' keeps the SMM resolution
return frame
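if __name__ == '__main__':
  # Minimal smoke test -- a sketch, not part of the library. It builds one
  # synthetic raw observation (all players and the ball at the origin, first
  # player active) and smooths the channels with an assumed 3x3 box kernel.
  obs = [{'left_team': np.zeros((11, 2)),
          'right_team': np.zeros((11, 2)),
          'ball': np.zeros(3),
          'active': 0}]
  smm = generate_smm(obs, np.ones((3, 3)))
  print(smm.shape)  # (1, SMM_HEIGHT, SMM_WIDTH, len(SMM_LAYERS)) = (1, 72, 96, 4)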
``` |
{
"source": "12leclarkson/spotipy",
"score": 3
} |
#### File: spotipy/examples/artist_recommendations.py
```python
import argparse
import logging
import spotipy
from spotipy.oauth2 import SpotifyClientCredentials
logger = logging.getLogger('examples.artist_recommendations')
logging.basicConfig(level='INFO')
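# SpotifyClientCredentials reads the SPOTIPY_CLIENT_ID and
# SPOTIPY_CLIENT_SECRET environment variables; both must be set before
# running this example.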
client_credentials_manager = SpotifyClientCredentials()
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
def get_args():
parser = argparse.ArgumentParser(description='Recommendations for the '
'given artist')
parser.add_argument('-a', '--artist', required=True, help='Name of Artist')
return parser.parse_args()
def get_artist(name):
results = sp.search(q='artist:' + name, type='artist')
items = results['artists']['items']
if len(items) > 0:
return items[0]
else:
return None
def show_recommendations_for_artist(artist):
results = sp.recommendations(seed_artists=[artist['id']])
for track in results['tracks']:
logger.info('Recommendation: %s - %s', track['name'],
track['artists'][0]['name'])
def main():
args = get_args()
artist = get_artist(args.artist)
if artist:
show_recommendations_for_artist(artist)
else:
logger.error("Can't find that artist", args.artist)
if __name__ == '__main__':
main()
``` |
{
"source": "12libao/tmr",
"score": 3
} |
#### File: examples/cylinder/cylinder_effectivity_plot.py
```python
from __future__ import print_function
import tikzplots as tkz
import argparse
import numpy as np
import re
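# Parse a data file whose first line is a header of the form
# '... = name1, name2, ...' and whose remaining lines are whitespace-separated
# numeric rows; returns (header_names, data_array).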
def parse_data_file(fname):
with open(fname, 'r') as fp:
lines = fp.readlines()
# Read in the first line, and find the comma-separated values
# in the header
hline = lines[0]
for index, h in enumerate(hline):
if h == '=':
hstr = hline[index+1:].split(',')
# Strip away any white space
header = []
for h in hstr:
header.append(h.strip())
data = []
for line in lines[1:]:
dline = []
for entry in line.split():
dline.append(float(entry))
data.append(dline)
return header, np.array(data)
# Create an argument parser to read in arguments from the command line
p = argparse.ArgumentParser()
p.add_argument('--files', nargs='+', type=str, help='List of files')
p.add_argument('--labels', nargs='+', type=str, help='List of labels')
p.add_argument('--outfile', type=str, default='output.tex')
p.add_argument('--plot', type=str, default='effectivity')
args = p.parse_args()
# Set the colors to use for each set of bars
colors = []
for i in range(10):
colors.append('tableau%d'%(i))
tikzcolors = '''
\definecolor{tableau0}{RGB}{31,119,180}
\definecolor{tableau1}{RGB}{255,158,74}
\definecolor{tableau2}{RGB}{103,191,92}
\definecolor{tableau3}{RGB}{237,102,93}
\definecolor{tableau4}{RGB}{148,103,189}
\definecolor{tableau5}{RGB}{168,120,110}
\definecolor{tableau6}{RGB}{237,151,202}
\definecolor{tableau7}{RGB}{162,162,162}
\definecolor{tableau8}{RGB}{205,204,93}
\definecolor{tableau9}{RGB}{109,204,218}
'''
data = []
for fname in args.files:
try:
header, dat = parse_data_file(fname)
except:
print('fname = ', fname)
data.append(dat)
# Plot the error on the y-axis
nnodes_index = header.index('nnodes')
fval_eff_index = header.index('fval_effectivity')
indc_eff_index = header.index('indicator_effectivity')
# Find the max value of y
xmin = 1e20
xmax = 0
ymin = 0
ymax = 0
# Look through all the data
for d in data:
xmin = min(xmin, np.min(d[:, nnodes_index]))
xmax = max(xmax, np.max(d[:, nnodes_index]))
if args.plot == 'effectivity':
ymax = max(ymax, np.max(d[:, fval_eff_index]))
ymax = min(ymax, 100)
else:
ymax = max(ymax, np.max(d[:, indc_eff_index]))
ymax = min(ymax, 500)
# Round to the nearest multiple of 10
xmin = int(np.floor(np.log10(xmin)))
xmax = int(np.ceil(np.log10(xmax)))
# Create a range
xticks = np.linspace(xmin, xmax, xmax - xmin + 1)
xtick_labels = []
for exp in range(xmin, xmax + 1, 1):
xtick_labels.append('$10^{%d}$'%(exp))
# Set the positions of the tick locations
if ymax < 2.0:
ymax_int = int(np.ceil(4.0*ymax))
ymax = ymax_int/4.0
yticks = np.linspace(0, ymax, ymax_int+1)
ytick_labels = yticks
elif ymax < 10:
ymax = int(np.ceil(ymax))
yticks = np.linspace(0, ymax, ymax+1)
ytick_labels = range(ymax+1)
elif ymax < 20:
    ymax = 2*int(np.ceil(ymax/2.0))
    ytick_labels = range(0, ymax+1, 2)
    yticks = np.linspace(0, ymax, ymax//2 + 1)
else:
    ymax = 5*int(np.ceil(ymax/5.0))
    ytick_labels = range(0, ymax+1, 5)
    yticks = np.linspace(0, ymax, ymax//5 + 1)
# The overall dimensions
xdim = 2.0
xscale = xdim/(xmax - xmin)
ydim = 1.75
yscale = ydim/(ymax - ymin)
# Get the header info
s = tkz.get_header()
s += tkz.get_begin_tikz(xdim=1.5, ydim=1.5, xunit='in', yunit='in')
s += tikzcolors
symbols = ['circle', 'square', 'triangle', 'delta', 'diamond']
for k, d in enumerate(data):
xvals = np.log10(d[:, nnodes_index])
if args.plot == 'effectivity':
yvals = d[:, fval_eff_index]
else:
yvals = d[:, indc_eff_index]
s += tkz.get_2d_plot(xvals, yvals,
line_dim='very thick',
color=colors[k % 10],
symbol=symbols[k % 4],
symbol_size=0.035,
xscale=xscale, yscale=yscale,
xmin=xmin, xmax=xmax,
ymin=ymin, ymax=ymax)
# Set the labels (lower-right corner)
if args.labels is not None:
for k, label in enumerate(args.labels):
x = xmin + 0.75*(xmax - xmin)
y = ymin + 0.05*(ymax - ymin)*(len(args.labels)-k)
length = 0.035*(xmax - xmin)
s += tkz.get_legend_entry(x, y, length, label=label,
font_size='small',
line_dim='very thick',
color=colors[k % 10], symbol=symbols[k % 4],
symbol_size=0.035,
xscale=xscale, yscale=yscale)
if args.plot == 'effectivity':
title = 'Effectivity'
else:
title = 'Indicator effectivity'
# Plot the axes
s += tkz.get_2d_axes(xmin, xmax, ymin, ymax,
xscale=xscale, yscale=yscale,
xticks=xticks, yticks=yticks,
xtick_labels=xtick_labels,
ytick_labels=ytick_labels,
tick_font='normalsize',
tick_frac=0.01,
xlabel_offset=0.085,
label_font='Large',
xlabel='Number of nodes',
ylabel_offset=0.175,
ylabel=title)
s += tkz.get_end_tikz()
fp = open(args.outfile, 'w')
fp.write(s)
fp.close()
```
#### File: examples/geomach/geomach.py
```python
from __future__ import print_function, division
from mpi4py import MPI
from tmr import TMR
import numpy as np
import argparse
from GeoMACH.PGM.core import PGMconfiguration, PGMparameter, PGMdv
from GeoMACH.PGM.components import PGMwing, PGMbody, PGMshell
from GeoMACH.PGM.components import PGMjunction, PGMtip, PGMcone
class Wing(PGMconfiguration):
def _define_comps(self):
self.comps['wing'] = PGMwing(num_x=1, num_z=1, left_closed=True)
self.comps['tip'] = PGMtip(self, 'wing', 'left', 0.1)
def _define_params(self):
wing = self.comps['wing'].props
wing['pos'].params[''] = PGMparameter(3, 3, pos_u=[0,0.37,1.0])
wing['scl'].params[''] = PGMparameter(3, 1, pos_u=[0,0.37,1.0])
def _compute_params(self):
wing = self.comps['wing'].props
wing['pos'].params[''].data[0, :] = [904.294, 174.126, 0.0]
wing['pos'].params[''].data[1, :] = [1225.82, 181.071, 427.999]
wing['pos'].params[''].data[2, :] = [1780.737, 263.827, 1156.753]
wing['scl'].params[''].data[:, 0] = [536.181, 285.782, 107.4]
return [], [], []
def _set_bspline_options(self):
wing = self.comps['wing'].faces
wing['upp'].set_option('num_cp', 'u', [40])
wing['upp'].set_option('num_cp', 'v', [40])
class Trussbraced(PGMconfiguration):
def _define_comps(self):
self.comps['fuse'] = PGMbody(num_x=17, num_y=6, num_z=4)
self.comps['lwing'] = PGMwing(num_x=7, num_z=7, left_closed=True)
self.comps['lstrut'] = PGMwing(num_x=4, num_z=4,
left_closed=True, right_closed=True)
self.comps['lv'] = PGMwing(num_z=4)
self.comps['ltail'] = PGMwing(left_closed=True)
self.comps['vtail'] = PGMwing(num_x=5, num_z=4, left_closed=True)
self.comps['fuse_f'] = PGMcone(self, 'fuse', 'front', 18)
self.comps['fuse_r'] = PGMcone(self, 'fuse', 'rear', 2)
self.comps['lwing_t'] = PGMtip(self, 'lwing', 'left', 0.1)
self.comps['ltail_t'] = PGMtip(self, 'ltail', 'left', 0.1)
self.comps['vtail_t'] = PGMtip(self, 'vtail', 'left', 0.1)
self.comps['lwing_fuse'] = PGMjunction(self, 'fuse', 'lft', 'E',
[0,1], 'lwing', 'right',
fweight=4, mweight=2)
self.comps['lstrut_fuse'] = PGMjunction(self, 'fuse', 'lft', 'E',
[4,2], 'lstrut', 'right')
self.comps['lstrut_lwing'] = PGMjunction(self, 'lwing', 'low', 'S',
[4,1], 'lstrut', 'left',
fweight=3, mweight=3)
self.comps['lv_lwing'] = PGMjunction(self, 'lwing', 'low', 'S',
[1,3], 'lv', 'left')
self.comps['lv_lstrut'] = PGMjunction(self, 'lstrut', 'upp', 'S',
[1,0], 'lv', 'right')
self.comps['vtail_fuse'] = PGMjunction(self, 'fuse', 'top', 'E',
[1,10], 'vtail', 'right')
self.comps['ltail_vtail'] = PGMjunction(self, 'vtail', 'low', 'N',
[0,1], 'ltail', 'right')
def _define_params(self):
fuse = self.comps['fuse'].props
fuse['nor'].params[''] = PGMparameter(1, 3)
fuse['pos'].params[''] = PGMparameter(2, 3)
fuse['pos'].params['nose'] = PGMparameter(2, 3, pos_u=[0,0.12])
fuse['pos'].params['tail'] = PGMparameter(2, 3, pos_u=[0.76,1.0])
fuse['scl'].params['rad1'] = PGMparameter(4, 1, order_u=4,
pos_u=[0,0.01,0.05,0.12])
fuse['scl'].params['rad2'] = PGMparameter(2, 1, pos_u=[0.12,0.76])
fuse['scl'].params['rad3'] = PGMparameter(4, 1, order_u=4,
pos_u=[0.76,0.83,0.99,1])
fuse['scl'].params['tail'] = PGMparameter(2, 3, pos_u=[0.76,1.0])
fuse['flt'].params['flt1a'] = PGMparameter(4, 2, order_u=4,
pos_u=[0.24,0.27,0.33,0.36],
pos_v=[0.5,1])
fuse['flt'].params['flt1b'] = PGMparameter(2, 2,
pos_u=[0.36,0.41],
pos_v=[0.5,1])
fuse['flt'].params['flt1c'] = PGMparameter(4, 2, order_u=4,
pos_u=[0.41,0.44,0.49,0.52],
pos_v=[0.5,1])
fuse['flt'].params['flt2a'] = PGMparameter(4, 2, order_u=4,
pos_u=[0.24,0.27,0.33,0.36],
pos_v=[0,0.5])
fuse['flt'].params['flt2b'] = PGMparameter(2, 2,
pos_u=[0.36,0.41],
pos_v=[0,0.5])
fuse['flt'].params['flt2c'] = PGMparameter(4, 2, order_u=4,
pos_u=[0.41,0.44,0.49,0.52],
pos_v=[0,0.5])
lwing = self.comps['lwing'].props
lwing['pos'].params[''] = PGMparameter(1, 3)
lwing['scl'].params[''] = PGMparameter(2, 1)
lwing['pos'].params['lin'] = PGMparameter(3, 3, order_u=3)
lstrut = self.comps['lstrut'].props
lstrut['pos'].params[''] = PGMparameter(1, 3)
lstrut['pos'].params['lin'] = PGMparameter(2, 3)
lstrut['scl'].params[''] = PGMparameter(2, 1)
lstrut['nor'].params[''] = PGMparameter(1, 1)
lv = self.comps['lv'].props
lv['pos'].params[''] = PGMparameter(1, 3)
lv['pos'].params['lin'] = PGMparameter(2, 3)
lv['scl'].params[''] = PGMparameter(2, 1)
lv['nor'].params[''] = PGMparameter(1, 1)
lv['rot'].params[''] = PGMparameter(2, 3, pos_u=[0,1])
ltail = self.comps['ltail'].props
ltail['pos'].params[''] = PGMparameter(1, 3)
ltail['pos'].params['lin'] = PGMparameter(2, 3)
ltail['scl'].params[''] = PGMparameter(2, 1)
vtail = self.comps['vtail'].props
vtail['pos'].params[''] = PGMparameter(1, 3)
vtail['pos'].params['lin'] = PGMparameter(2, 3)
vtail['scl'].params[''] = PGMparameter(2, 1)
vtail['nor'].params[''] = PGMparameter(1, 3)
def _compute_params(self):
fuse = self.comps['fuse'].props
fuse['nor'].params[''].val([1.0,0.0,1.0])
fuse['pos'].params[''].val([[0,0,0],[36,0,0]])
fuse['pos'].params['nose'].val([[0,-0.4,0],[0,0,0]])
fuse['pos'].params['tail'].val([[0,0,0],[0,1.6,0]])
fuse['scl'].params['rad1'].val([1,1.2,1.9,2])
fuse['scl'].params['rad2'].val([2,2])
fuse['scl'].params['rad3'].val([2,1.7,0.6,0.4])
fuse['scl'].params['tail'].val([[0,0,0],[-0.3,0,0]])
fuse['flt'].params['flt1a'].val([[0,0],[0,0],[0.6,0.6],[0.6,0.6]])
fuse['flt'].params['flt1b'].val([[0.6,0.6],[0.6,0.6]])
fuse['flt'].params['flt1c'].val([[0.6,0.6],[0.6,0.6],[0,0],[0,0]])
fuse['flt'].params['flt2a'].val([[0,0],[0,0],[1,1],[1,1]])
fuse['flt'].params['flt2b'].val([[1,1],[1,1]])
fuse['flt'].params['flt2c'].val([[1,1],[1,1],[0,0],[0,0]])
lwing = self.comps['lwing'].props
lwing['pos'].params[''].val([12,1.7,2.8])
lwing['scl'].params[''].val([3.6,0.8])
lwing['pos'].params['lin'].val([[0,0,0],[2.5,-0.1,11],[5,-0.8,22]])
lstrut = self.comps['lstrut'].props
lstrut['pos'].params[''].val([13.4,-1.6,2.6])
lstrut['pos'].params['lin'].val([[0,0,0],[1.6,2.6,11.8]])
lstrut['scl'].params[''].val([1.8,1.6])
lstrut['nor'].params[''].val([1.0])
lv = self.comps['lv'].props
lv['pos'].params[''].val([14.3,-0.12,8.8])
lv['pos'].params['lin'].val([[0,0,0],[0,1.58,0]])
lv['scl'].params[''].val([1.1,1.1])
lv['nor'].params[''].val([1.0])
lv['rot'].params[''].val([[0,2,0],[0,-2,0]])
ltail = self.comps['ltail'].props
ltail['pos'].params[''].val([35.3,6.6,0.25])
ltail['pos'].params['lin'].val([[0,0,0],[2.6,0,5]])
ltail['scl'].params[''].val([3.3,1])
vtail = self.comps['vtail'].props
vtail['pos'].params[''].val([30.7,2.1,0])
vtail['pos'].params['lin'].val([[0,0,0],[4.6,5,0]])
vtail['scl'].params[''].val([5,4.5])
vtail['nor'].params[''].val([1.0,0.0,0.0])
return [], [], []
def _set_bspline_options(self):
comps = self.comps
comps['fuse'].faces['lft'].set_option('num_cp', 'u', [4,4,14,14,4,4])
comps['fuse'].faces['rgt'].set_option(
'num_cp', 'v', [85,4,4,4,4,4,4,4,4,4,102,4,4,16,8,4,6])
comps['vtail'].faces['low'].set_option('num_cp', 'u', [6,4,30,4,4])
comps['vtail'].faces['low'].set_option('num_cp', 'v', [10,10,10,4])
comps['lwing'].faces['upp'].set_option(
'num_cp', 'v', [20,4,4,20,5,4,31])
comps['lwing'].faces['low'].set_option(
'num_cp', 'u', [12,12,20,4,4,4,4])
comps['lstrut'].faces['upp'].set_option('num_cp', 'u', [4,8,12,4])
comps['lstrut'].faces['upp'].set_option('num_cp', 'v', [4,5,4,4])
def geomach_to_tmr(bse):
'''
Convert a BSE-GeoMach model to a TMR model
'''
# Compute the number of vertices, edges and faces
num = bse._num
nverts = num['vert']
nedges = num['edge']
nfaces = num['surf']
    # Create the lists of vertices, edges, faces and surfaces
verts = nverts*[ None ]
edges = nedges*[ None ]
faces = nfaces*[ None ]
surfs = nfaces*[ None ]
# Set the topology object
topo = bse._topo
size = bse._size
str_indices = bse._str_indices
bspline = bse._bspline
# Extract the control point array
cp_str = bse.vec['cp_str'].array
# Point from the edge index on each face to the i/j locations
# in the surf_ptrs array
face_to_edge = [[1, 0], [2, 1], [1, 2], [0, 1]]
# Point from the edge index on each face to the corresponding
# vertices on each face
face_to_edge_verts = [
[[0, 0], [2, 0]],
[[2, 0], [2, 2]],
[[0, 2], [2, 2]],
[[0, 0], [0, 2]]]
# Create the surfaces
surf_ptrs = topo['surf_ptrs']
edge_ptrs = topo['edge_ptrs']
for i in range(nfaces):
cp_offset = str_indices['cp'][i,0]
ku = bspline['order'][topo['surf_group'][i,0]-1]
kv = bspline['order'][topo['surf_group'][i,1]-1]
nu = bspline['num_cp'][topo['surf_group'][i,0]-1]
nv = bspline['num_cp'][topo['surf_group'][i,1]-1]
# Extract and create the b-spline surfaces
cp = np.zeros((nu, nv, 3), dtype=np.double)
for jj in range(nv):
for ii in range(nu):
cp_index = cp_offset + ii + jj*nu
cp[ii, jj, :] = cp_str[cp_index]
surfs[i] = TMR.BsplineSurface(cp, ku=ku, kv=kv)
faces[i] = TMR.FaceFromSurface(surfs[i])
# Create the vertices from the faces
for i in range(nfaces):
for jj in range(2):
for ii in range(2):
# Get the vertex index
index = surf_ptrs[i, 2*ii, 2*jj]-1
# If the vertex has not be allocated, create it now
if verts[index] is None:
u = 1.0*ii
v = 1.0*jj
verts[index] = TMR.VertexFromFace(faces[i], u, v)
for i in range(nverts):
if verts[i] is None:
raise ValueError('TMRVertex %d was not initialized\n'%(i))
# Create the edges
for i in range(nfaces):
for ii in range(4):
i1 = face_to_edge[ii][0]
j1 = face_to_edge[ii][1]
# Keep track of the direction
edge_num = surf_ptrs[i, i1, j1]
index = abs(edge_num)-1
# Find the vertex numbers
v1 = edge_ptrs[index, 0]-1
v2 = edge_ptrs[index, 1]-1
# The start/end vertex location
vert1 = None
vert2 = None
# Get the indices of the vertices within the surf_ptrs array
i1 = face_to_edge_verts[ii][0][0]
j1 = face_to_edge_verts[ii][0][1]
i2 = face_to_edge_verts[ii][1][0]
j2 = face_to_edge_verts[ii][1][1]
if (v1 == surf_ptrs[i, i1, j1]-1 and
v2 == surf_ptrs[i, i2, j2]-1):
vert1 = verts[v1]
vert2 = verts[v2]
pts = np.array([face_to_edge_verts[ii][0],
face_to_edge_verts[ii][1]], dtype=np.double)
            elif (v2 == surf_ptrs[i, i1, j1]-1 and
                  v1 == surf_ptrs[i, i2, j2]-1):
                vert1 = verts[v2]
                vert2 = verts[v1]
                pts = np.array([face_to_edge_verts[ii][1],
                                face_to_edge_verts[ii][0]], dtype=np.double)
            else:
                # Guard against inconsistent topology; without this, 'pts'
                # would be referenced before assignment below
                raise ValueError('Edge %d does not match the vertices of face %d'%(index, i))
            pts = pts/2.0
# Check whether this is a degenerate edge
is_degen = 0
if v1 == v2:
is_degen = 1
# Create the parametric curve
pcurve = TMR.BsplinePcurve(pts)
if edges[index] is None:
edges[index] = TMR.EdgeFromFace(faces[i], pcurve, is_degen)
edges[index].setVertices(vert1, vert2)
else:
edges[index].addEdgeFromFace(faces[i], pcurve)
for i in range(nedges):
if edges[i] is None:
raise ValueError('TMREdge %d was not initialized\n'%(i))
# After all of the edges are created, create the edge loops
# for each face. Account for the CCW orientation.
ccw_order = [1, 1, -1, -1]
for i in range(nfaces):
# Find the edges and directions for this edge loop
e = []
dirs = []
for ii in range(4):
i1 = face_to_edge[ii][0]
j1 = face_to_edge[ii][1]
edge_num = surf_ptrs[i, i1, j1]
e.append(edges[abs(edge_num)-1])
dirs.append(np.sign(edge_num)*ccw_order[ii])
# Set the loop
loop = TMR.EdgeLoop(e, dirs)
faces[i].addEdgeLoop(loop)
# Create the model
geo = TMR.Model(verts, edges, faces)
return geo
# Create an argument parser to read in arguments from the command line
p = argparse.ArgumentParser()
p.add_argument('--htarget', type=float, default=10.0)
p.add_argument('--model_type', type=str, default='wing')
args = p.parse_args()
# Create the GeoMACH model
print('Loading GeoMACH model...')
if args.model_type == 'wing':
pgm = Wing()
elif args.model_type == 'trussbraced':
pgm = Trussbraced()
bse = pgm.initialize()
# Convert from GeoMACH to TMR
print('Converting GeoMACH model to TMR model...')
geo = geomach_to_tmr(bse)
# Create the mesh
print('Meshing TMR model...')
comm = MPI.COMM_WORLD
mesh = TMR.Mesh(comm, geo)
# Mesh the part
opts = TMR.MeshOptions()
opts.mesh_type_default = TMR.TRIANGLE
opts.num_smoothing_steps = 10
opts.triangularize_print_level = 1
opts.triangularize_print_iter = 10000
opts.write_mesh_quality_histogram = 1
# Mesh the geometry with the given target size
htarget = args.htarget
mesh.mesh(htarget, opts=opts)
mesh.writeToVTK('surface-mesh.vtk')
# Create a model from the mesh
print('Creating model from mesh...')
model = mesh.createModelFromMesh()
# Create the corresponding mesh topology from the mesh-model
topo = TMR.Topology(comm, model)
# Create the quad forest and set the topology of the forest
print('Creating TMRQuadForest...')
forest = TMR.QuadForest(comm)
forest.setTopology(topo)
# Create random trees and balance the mesh. Print the output file
forest.createRandomTrees(nrand=3, max_lev=3)
forest.balance(1)
forest.writeForestToVTK('forest-mesh%d.vtk'%(comm.rank))
```
#### File: examples/quality/bracket_quality.py
```python
from __future__ import print_function
from mpi4py import MPI
from tmr import TMR
from egads4py import egads
import numpy as np
import argparse
import os
from OctMeshQuality import *
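# Order an unordered list of edges into a connected loop, returning the
# reordered edges, their traversal directions (+1/-1), and the loop vertices.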
def get_edge_dirs_verts(elist):
edge_list = elist[:]
edges = [edge_list.pop()]
dirs = [1]
v1, vnext = edges[-1].getVertices()
verts = [v1, vnext]
nedges = len(edge_list)
for k in range(nedges):
for i, edge in enumerate(edge_list):
v1, v2 = edge.getVertices()
if v1.getEntityId() == vnext.getEntityId():
dirs.append(1)
edges.append(edge_list.pop(i))
vnext = v2
break
elif v2.getEntityId() == vnext.getEntityId():
dirs.append(-1)
edges.append(edge_list.pop(i))
vnext = v1
break
verts.append(vnext)
return edges, dirs, verts[:-1]
def load_model():
# Create the egads context
ctx = egads.context()
parts = []
r0 = 0.05
# Create the boxes
x0 = [0, 0, 0]
x1 = [0.25, 0.25, 0.25]
B0 = ctx.makeSolidBody(egads.BOX, rdata=[x0, x1])
parts.append(ctx.makeTopology(egads.MODEL, children=[B0]))
# Create the x-arm
x0 = [0.25, 0, 0]
x1 = [0.75, 0.25, 0.25]
B1 = ctx.makeSolidBody(egads.BOX, rdata=[x0, x1])
x0 = [0.85, 0.125, 0]
x1 = [0.85, 0.125, 0.25]
C1 = ctx.makeSolidBody(egads.CYLINDER, rdata=[x0, x1, r0])
parts.append(B1.solidBoolean(C1, egads.SUBTRACTION))
# Create the y-arm
x0 = [0, 0.25, 0]
x1 = [0.25, 0.75, 0.25]
B2 = ctx.makeSolidBody(egads.BOX, rdata=[x0, x1])
x0 = [0, 0.85, 0.125]
x1 = [0.25, 0.85, 0.125]
C2 = ctx.makeSolidBody(egads.CYLINDER, rdata=[x0, x1, r0])
parts.append(B2.solidBoolean(C2, egads.SUBTRACTION))
# Create the z-arm
x0 = [0, 0, 0.25]
x1 = [0.25, 0.25, 0.75]
B3 = ctx.makeSolidBody(egads.BOX, rdata=[x0, x1])
x0 = [0.125, 0, 0.85]
x1 = [0.125, 0.25, 0.85]
C3 = ctx.makeSolidBody(egads.CYLINDER, rdata=[x0, x1, r0])
parts.append(B3.solidBoolean(C3, egads.SUBTRACTION))
# Create all of the models
geos = []
for p in parts:
geos.append(TMR.ConvertEGADSModel(p))
# Create the full list of vertices, edges, faces and volumes
verts = []
edges = []
faces = []
vols = []
for geo in geos:
verts.extend(geo.getVertices())
edges.extend(geo.getEdges())
faces.extend(geo.getFaces())
vols.extend(geo.getVolumes())
# Set all of the matching faces
TMR.setMatchingFaces(geos)
# Create the geometry
geo = TMR.Model(verts, edges, faces, vols)
return geo
# The communicator
comm = MPI.COMM_WORLD
# Load the geometry model
geo = load_model()
# Create the mesh
mesh = TMR.Mesh(comm, geo)
# Set the meshing options
opts = TMR.MeshOptions()
opts.frontal_quality_factor = 1.25
opts.num_smoothing_steps = 50
opts.triangularize_print_iter = 5
opts.write_mesh_quality_histogram = 1
# Create the surface mesh
mesh.mesh(0.02, opts)
# Create a model from the mesh
model = mesh.createModelFromMesh()
# Create the corresponding mesh topology from the mesh-model
topo = TMR.Topology(comm, model)
# Create the quad forest and set the topology of the forest
forest = TMR.OctForest(comm)
forest.setTopology(topo)
forest.createTrees(1)
forest.createNodes()
ar = computeAR(forest)
min_ang = computeMinAngle(forest)
fshape = computeShape(forest)
# Write the mesh quality to vtk
writeQualityToVtk(forest, ar, min_ang, fshape,fname='quality-bracket.vtk')
plotShapeHist(fshape, xmin=np.amin(fshape), fname='bracket_shape_hist.pdf')
plotARHist(ar, fname='bracket_ar_hist.pdf', xmax=np.ceil(4.0*np.amax(ar))/4.0)
plotMinAngHist(min_ang, fname='bracket_ang_hist.pdf', xmin=np.amin(min_ang)-1.0)
```
#### File: examples/quality/OctMeshQuality.py
```python
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.ticker import PercentFormatter
import matplotlib.colors as colors
import matplotlib.cm as cmx
# Configure the plotting environment
plt.rcParams['xtick.direction'] = 'out'
plt.rcParams['ytick.direction'] = 'out'
# Optionally set font to Computer Modern to avoid common missing font errors
params = {
'axes.labelsize': 20,
'legend.fontsize': 14,
'xtick.labelsize': 20,
'ytick.labelsize': 20,
'text.usetex': True}
plt.rcParams.update(params)
# Latex math
plt.rcParams['text.latex.preamble'] = r'\usepackage{sfmath}'
plt.rcParams['font.family'] = 'sans-serif'
# plt.rcParams['font.sans-serif'] = 'courier'
plt.rcParams['font.size'] = 18
plt.rcParams['font.weight'] = 'bold'
plt.rcParams['lines.linewidth'] = 4
plt.rcParams['lines.color'] = 'r'
# Make sure everything is within the frame
plt.rcParams.update({'figure.autolayout': True})
# These are the "Tableau 20" colors as RGB.
tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
(44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
# Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
for i in range(len(tableau20)):
r, g, b = tableau20[i]
tableau20[i] = (r / 255., g / 255., b / 255.)
# Color scheme
hist_color = tableau20[0]
# Compute the relative size of the element in the forest
def computeShape(forest):
octs = forest.getOctants()
Xpts = forest.getPoints()
conn = forest.getMeshConn()
fshape = np.zeros(len(octs))
jacobian_list={0:[1,2,4], 1:[3,0,5], 2:[0,3,6],
3:[2,1,7], 4:[6,5,0], 5:[4,7,1],
6:[7,4,2], 7:[5,6,3]}
for i in range(len(octs)):
npts = 8
nodes = conn[i,:]
pts = Xpts[nodes[:], :]
num = 0.0
# Compute the individual Ak matrices
for j in range(npts):
# Get the reference vertices of volume
v = jacobian_list[j]
Ak = np.array([pts[v[0],:]-pts[j,:], pts[v[1],:]-pts[j,:], \
pts[v[2],:]-pts[j,:] ])
# Get determinant of local matrix alpha
#alpha[j] = np.linalg.det(Ak)
#sigma[j] = np.sum(np.dot(Ak.T, Ak).diagonal())
num += np.sum(np.dot(Ak.T, Ak).diagonal())/np.linalg.det(Ak)**(2./3.)
# Compute the shape metric 24/num
fshape[i] = 24./num
return fshape
# Compute the aspect ratio of each element in the forest
def computeAR(forest):
octs = forest.getOctants()
Xpts = forest.getPoints()
conn = forest.getMeshConn()
ar = np.zeros(len(octs))
# List of vertices which create an edge on the octant
edge_list = [[0, 1], [1, 3], [2, 3], [0, 2], [4, 5], [5, 7],
[6, 7], [4, 6], [1, 5], [3, 7], [2, 6], [0, 4]]
for i in range(len(octs)):
# Get the points
npts = 8
nodes = conn[i,:]
pts = Xpts[nodes[:], :]
edge_lengths = np.zeros(12)
# Compute the length of each edge on the octant
for j, edge_pts in enumerate(edge_list):
v1 = pts[edge_pts[0],:]
v2 = pts[edge_pts[1],:]
edge_lengths[j] = np.linalg.norm(v1-v2)
# Compute the aspect ratio of the octant
ar[i] = np.amax(edge_lengths)/np.amin(edge_lengths)
return ar
# Compute the minimum angle of each element in the forest
def computeMinAngle(forest):
octs = forest.getOctants()
Xpts = forest.getPoints()
conn = forest.getMeshConn()
min_angs = np.zeros(len(octs))
# List of verticies corresponding to interior angles
angle_list = {0:[1, 2, 4], 1:[0, 5, 3],
2:[0, 6, 3], 3:[1, 2, 7],
4:[0, 5, 6], 5:[1, 4, 7],
6:[2, 4, 7], 7:[3, 5, 6]}
for i in range(len(octs)):
# Get the points
npts = 8
nodes = conn[i,:]
pts = Xpts[nodes[:], :]
min_angle = 90.0
for j in range(npts):
node_neighbors = angle_list[j]
for k in node_neighbors:
for l in node_neighbors:
if k != l:
# Compute 2 vectors from 3 unique points
vec1 = pts[k, :] - pts[j, :]
vec2 = pts[l, :] - pts[j, :]
# Make the vectors unit vectors
if (np.linalg.norm(vec1) != np.float64(0.0)) and (np.linalg.norm(vec2) != np.float64(0.0)):
vec1 /= np.linalg.norm(vec1)
vec2 /= np.linalg.norm(vec2)
# Compute the angle between the vectors as long as they
# are different
if np.array_equal(vec1, vec2) is False:
angle = (180.0/np.pi)*np.arccos(np.dot(vec1, vec2))
# Update min_angle if needed
if angle < min_angle:
min_angle = angle
min_angs[i] = min_angle
return min_angs
# Write out mesh quality metrics to vtk file
def writeQualityToVtk(forest, ar, min_ang, fshape, fname='quality.vtk'):
octs = forest.getOctants()
Xpts = forest.getPoints()
conn = forest.getMeshConn()
npts = len(Xpts)
nhex = len(octs)
f = open(fname, 'w')
f.write('# vtk DataFile Version 3.0\n')
f.write('vtk output\nASCII\n')
f.write('DATASET UNSTRUCTURED_GRID\n')
# write out the points
f.write('POINTS %d float\n'%(npts))
for pt in Xpts:
f.write('%e %e %e\n'%(pt[0], pt[1], pt[2]))
# write out the mesh connectivity
f.write('\nCELLS %d %d\n'%(nhex, nhex*9))
for i in range(len(octs)):
nodes = conn[i,:]
f.write('8 %d %d %d %d %d %d %d %d\n'%(
nodes[0], nodes[1], nodes[3], nodes[2],
nodes[4], nodes[5], nodes[7], nodes[6]))
# all hex
f.write('\nCELL_TYPES %d\n'%(nhex))
for i in range(nhex):
f.write('%d\n'%(12))
# write AR values
f.write('\nCELL_DATA %d\n'%(nhex))
f.write('SCALARS aspect_ratio float 1\n')
f.write('LOOKUP_TABLE default\n')
for elem_ar in ar:
f.write('%e\n'%(elem_ar))
# write minimum angle values
f.write('SCALARS min_angle float 1\n')
f.write('LOOKUP_TABLE default\n')
for elem_ang in min_ang:
f.write('%e\n'%(elem_ang))
# Write the shape metric
# write minimum angle values
f.write('SCALARS shape_metric float 1\n')
f.write('LOOKUP_TABLE default\n')
for elem_shape in fshape:
f.write('%e\n'%(elem_shape))
f.close()
return
def plotARHist(ar, xmax=None, fname='ar_hist.pdf'):
hist_max = int(np.ceil(np.amax(ar)))
nbins = 40*(hist_max-1)
# Create the figure and set parameters
fig, ax = plt.subplots(1, 1, figsize=(12, 9))
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
# Plot the histogram
n, bins, patches = plt.hist(ar, bins=nbins, range=(1.0, hist_max),
density=True, stacked=False)
widths = bins[:-1] - bins[1:]
# Set the colors
norm = colors.Normalize(ar.min(), ar.max())
for b, p in zip(bins, patches):
color = plt.cm.coolwarm(norm(b))
p.set_facecolor(color)
# Configure the axes
if xmax:
plt.xlim((1.0, xmax))
else:
plt.xlim((1.0, hist_max))
plt.xlabel('Aspect Ratio')
ax.yaxis.set_major_formatter(PercentFormatter(xmax=-1.0/widths[0], decimals=0))
ax.tick_params(which='both', direction='out', top=False, right=False)
plt.savefig(fname, bbox_inches='tight', pad_inches=0.05)
return
def plotMinAngHist(min_ang, xmin=None, fname='angle_hist.pdf'):
hist_max = 90.0
nbins = 90
# Create the figure and set parameters
fig, ax = plt.subplots(1, 1, figsize=(12, 9))
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
# Plot the histogram
n, bins, patches = plt.hist(min_ang, bins=nbins, range=(0.0, hist_max),
align='mid', density=True)
# Set the colors
norm = colors.Normalize(min_ang.min(), min_ang.max())
for b, p in zip(bins, patches):
color = plt.cm.coolwarm_r(norm(b))
p.set_facecolor(color)
# Configure the axes
x_ticks = [0, 15, 30, 45, 60, 75, 90]
plt.xticks(x_ticks, x_ticks)
if xmin:
plt.xlim((xmin, 90.0))
else:
plt.xlim((0.0, 90.0))
plt.xlabel('Minimum Angle (deg.)')
ax.yaxis.set_major_formatter(PercentFormatter(xmax=1, decimals=0))
ax.tick_params(which='both', direction='out', top=False, right=False)
plt.tight_layout()
plt.savefig(fname, bbox_inches='tight', pad_inches=0.05)
return
def plotShapeHist(fshape, xmin=None, fname='shape_hist.pdf'):
hist_min = np.amin(fshape)
nbins = 50
#nbins = 40*(hist_min-1)
# Create the figure and set parameters
fig, ax = plt.subplots(1, 1, figsize=(12, 9))
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
# Plot the histogram
n, bins, patches = plt.hist(fshape, bins=nbins, range=(xmin, 1.0), \
density=True)
widths = bins[:-1] - bins[1:]
# Set the colors
norm = colors.Normalize(fshape.min(), fshape.max())
for b, p in zip(bins, patches):
color = plt.cm.coolwarm(norm(b))
p.set_facecolor(color)
plt.xlabel('Hexahedron Shape Metric')
ax.yaxis.set_major_formatter(PercentFormatter(xmax=-1/widths[0], decimals=0))
ax.tick_params(which='both', direction='out', top=False, right=False)
plt.tight_layout()
plt.savefig(fname, bbox_inches='tight', pad_inches=0.05,dpi=1000)
return
```
#### File: topology_optimization/bernstein/bernstein.py
```python
from __future__ import print_function
from mpi4py import MPI
from tmr import TMR, TopOptUtils
from paropt import ParOpt
from tacs import TACS, elements, constitutive, functions
import numpy as np
import argparse
import os
class QuadConformCreator(TMR.QuadConformTopoCreator):
"""
This class is called to create a TACSAssembler class with an underlying
filter mesh that conforms with Assembler mesh. The input to the class
consists of the boundary conditions, the forest of quadtrees for the
analysis mesh, the order of the conforming filter QuadForest mesh, and
the type of interpolant to be used.
"""
def __init__(self, bcs, forest, order=2, interp=TMR.BERNSTEIN_POINTS,
design_vars_per_node=1, props=None):
# Store the properties
self.props = props
self.element = None
def createTopoElement(self, order, filtr):
"""
Create an element for the entire mesh
"""
# Create the constitutive object - one for the entire mesh
self.con = TMR.QuadConstitutive(props=self.props, forest=filtr)
# Create the model (the type of physics we're using)
self.model = elements.LinearThermoelasticity2D(self.con)
# Set the basis functions and create the element
if order == 2:
self.basis = elements.LinearQuadBasis()
elif order == 3:
self.basis = elements.QuadraticQuadBasis()
elif order == 4:
self.basis = elements.CubicQuadBasis()
elif order == 5:
self.basis = elements.QuarticQuadBasis()
elif order == 6:
self.basis = elements.QuinticQuadBasis()
# Create the element type
self.element = elements.Element2D(self.model, self.basis)
return
def createElement(self, order, quadrant, index, filtr):
"""
Create the element for the specified quadrant
Args:
order (int): The order of the element
quadrant (TMR.Quadrant): The quadrant to be build for this element
index (list): The global numbers for the quadrant nodes
filtr (QuadForest): The QuadForest for the filter
Returns:
TACS.Element: Element to place within the Assembler
"""
if self.element is None:
self.createTopoElement(order, filtr)
return self.element
class CreatorCallback:
def __init__(self, bcs, props):
self.bcs = bcs
self.props = props
def creator_callback(self, forest):
"""
Given the forest, instantiate a creator class that will populate a
TACSAssembler object with the elements. This allocates the
QuadConformCreator class above and also returns the QuadForest object
associated with the filter. This is needed in the createTopoProblem
call.
"""
order = forest.getMeshOrder()
interp = TMR.BERNSTEIN_POINTS
dvs_per_node = self.props.getDesignVarsPerNode()
creator = QuadConformCreator(self.bcs, forest, order=order-1,
design_vars_per_node=dvs_per_node,
interp=interp, props=self.props)
filtr = creator.getFilter()
return creator, filtr
def create_forest(comm, depth, htarget):
"""
Create an initial forest for analysis and optimization
This code loads in the model, sets names, meshes the geometry and creates
a QuadForest from the mesh. The forest is populated with quadtrees with
the specified depth.
Args:
comm (MPI_Comm): MPI communicator
depth (int): Depth of the initial trees
htarget (float): Target global element mesh size
Returns:
QuadForest: Initial forest for topology optimization
"""
# Load the geometry model
geo = TMR.LoadModel('biclamped_traction.stp')
# Mark the boundary condition faces
verts = geo.getVertices()
edges = geo.getEdges()
faces = geo.getFaces()
edges[1].setName('fixed')
edges[9].setName('fixed')
edges[4].setName('traction')
# Create the mesh
mesh = TMR.Mesh(comm, geo)
# Set the meshing options
opts = TMR.MeshOptions()
# Create the surface mesh
mesh.mesh(htarget, opts)
# Create a model from the mesh
model = mesh.createModelFromMesh()
# Create the corresponding mesh topology from the mesh-model
topo = TMR.Topology(comm, model)
# Create the quad forest and set the topology of the forest
forest = TMR.QuadForest(comm)
forest.setTopology(topo)
# Set parameters for later usage
forest.createTrees(depth)
return forest
def create_problem(forest, bcs, props, nlevels, iter_offset=0,
m_fixed=0.0, use_compliance=False):
"""
Create the TMRTopoProblem object and set up the topology optimization problem.
This code is given the forest, boundary conditions, material properties and
the number of multigrid levels. Based on this info, it creates the TMRTopoProblem
and sets up the mass-constrained compliance minimization problem. Before
the problem class is returned it is initialized so that it can be used for
optimization.
Args:
forest (OctForest): Forest object
bcs (BoundaryConditions): Boundary condition object
props (StiffnessProperties): Material properties object
nlevels (int): number of multigrid levels
Returns:
TopoProblem: Topology optimization problem instance
"""
# Allocate the creator callback function
obj = CreatorCallback(bcs, props)
# Create a conforming filter
filter_type = 'lagrange'
# Characteristic length of the domain
r = 0.06
a = 0.04
area = r*a
r0 = 0.025*np.sqrt(area)
# Create the problem and filter object
problem = TopOptUtils.createTopoProblem(forest, obj.creator_callback, filter_type,
nlevels=nlevels, lowest_order=2,
r0=r0, N=15, use_galerkin=True,
design_vars_per_node=design_vars_per_node)
# Get the assembler object we just created
assembler = problem.getAssembler()
# Get the basis object from one of the elements
elems = assembler.getElements()
basis = elems[0].getElementBasis()
    # Create the traction objects that will be used later
# Fn = 1000e3 # Normal heat flux
Fn = 0.0
Ty = -2.5e6 # Traction force component in the y-direction
vpn = elems[0].getVarsPerNode()
trac = [0.0, Ty, Fn]
tractions = []
for findex in range(4):
tractions.append(elements.Traction2D(vpn, findex, basis, trac))
# Allocate a thermal traction boundary condition
force1 = TopOptUtils.computeTractionLoad('traction', forest, assembler,
tractions)
# Set the load case
problem.setLoadCases([force1])
# Set the constraint functions
funcs = [functions.StructuralMass(assembler)]
# Set the mass constraint
# (m_fixed - m(x))/m_fixed >= 0.0
problem.addConstraints(0, funcs, [-m_fixed], [-1.0/m_fixed])
# Set the values of the objective array
if use_compliance:
obj_array = [ 0.1 ]
compliance = functions.Compliance(assembler)
compliance.setComplianceType(elements.TOTAL_STRAIN_ENERGY_DENSITY)
problem.setObjective(obj_array, [compliance])
else:
obj_array = [ 1.0e3 ]
ksfail = functions.KSFailure(assembler, 10.0)
ksfail.setKSFailureType('continuous')
problem.setObjective(obj_array, [ksfail])
# Initialize the problem and set the prefix
problem.initialize()
# Set the callback for generating output
cb = OutputCallback(assembler, iter_offset=iter_offset)
problem.setOutputCallback(cb.write_output)
return problem
class OutputCallback:
def __init__(self, assembler, iter_offset=0):
self.assembler = assembler
# Set the output file name
flag = (TACS.OUTPUT_CONNECTIVITY |
TACS.OUTPUT_NODES |
TACS.OUTPUT_EXTRAS)
self.f5 = TACS.ToFH5(assembler, TACS.PLANE_STRESS_ELEMENT, flag)
self.iter_offset = iter_offset
def write_output(self, prefix, itr, oct_forest, quad_forest, x):
self.assembler.setVariables(x)
self.f5.writeToFile(os.path.join(prefix,
'output%d.f5'%(itr + self.iter_offset)))
# Set the optimization parameters
optimization_options = {
# Set the algorithm to use
'algorithm': 'tr',
'qn_subspace_size': 10,
'qn_type': 'bfgs',
'qn_diag_type': 'yts_over_sts',
# Parameters for the trust region method
'tr_init_size': 0.01,
'tr_max_size': 0.1,
'tr_min_size': 0.01, # 1e-6,
'tr_eta': 0.1,
'penalty_gamma': 10.0,
'tr_penalty_gamma_max': 1000.0,
'tr_write_output_frequency': 1,
'tr_infeas_tol': 1e-5,
'tr_l1_tol': 1e-5,
'tr_linfty_tol': 0.0, # Don't use the l-infinity norm in the stopping criterion
'tr_steering_barrier_strategy': 'default',
'tr_steering_starting_point_strategy': 'default',
# Parameters for the interior point method (used to solve the
# trust region subproblem)
'abs_res_tol': 1e-8,
'max_major_iters': 100,
'norm_type': 'l1',
'init_barrier_param': 10.0,
'use_line_search': False,
'barrier_strategy': 'mehrotra_predictor_corrector',
'starting_point_strategy': 'affine_step'}
# Create an argument parser to read in arguments from the command line
p = argparse.ArgumentParser()
p.add_argument('--prefix', type=str, default='./results')
p.add_argument('--vol_frac', type=float, default=0.3)
p.add_argument('--htarget', type=float, default=2.5e-3)
p.add_argument('--max_opt_iters', type=int, nargs='+',
default=[5])
p.add_argument('--init_depth', type=int, default=1)
p.add_argument('--mg_levels', type=int, nargs='+', default=[2])
p.add_argument('--order', type=int, default=4)
p.add_argument('--q_penalty', type=float, default=8.0)
args = p.parse_args()
# Set the communicator
comm = MPI.COMM_WORLD
# Print out all of the arguments to the command line
if comm.rank == 0:
for arg in vars(args):
print('%-20s'%(arg), getattr(args, arg))
# Set the max number of iterations
mg_levels = args.mg_levels
max_iterations = len(mg_levels)
# The thickness value
t = 0.01
# Compute the volume of the bracket
r = 0.06
a = 0.04
vol = r*a*t
vol_frac = args.vol_frac
# Create the first material properties object
rho = 2600.0*t
E = 70e9*t
nu = 0.3
alpha = 23.5e-6
kcond = 130.0*t
ys = 450e6
mat1 = constitutive.MaterialProperties(rho=rho, E=E,
nu=nu, alpha=alpha/(1.0 - 2*nu),
kappa=kcond, ys=ys)
# Create the second material properties object
rho = 1300.0*t
E = 35e9*t
nu = 0.3
alpha = 0.5*23.5e-6
kcond = 65.0*t
ys = 275e6
mat2 = constitutive.MaterialProperties(rho=rho, E=E,
nu=nu, alpha=alpha/(1.0 - 2*nu),
kappa=kcond, ys=ys)
prop_list = [mat1, mat2]
# Set the fixed mass
average_density = 0.5*(2600.0 + 1300.0)
initial_mass = vol*average_density
m_fixed = vol_frac*initial_mass
# Set the number of variables per node
design_vars_per_node = 1
if (len(prop_list) > 1):
design_vars_per_node = 1 + len(prop_list)
# Create the stiffness properties object
props = TMR.StiffnessProperties(prop_list, q=args.q_penalty, qtemp=0.0,
qcond=0.0, eps=0.3, k0=1e-3)
# Set the boundary conditions for the problem
bcs = TMR.BoundaryConditions()
bcs.addBoundaryCondition('fixed', [0, 1, 2], [0.0, 0.0, 50.0])
# Create the initial forest
forest = create_forest(comm, args.init_depth, args.htarget)
forest.setMeshOrder(args.order, TMR.GAUSS_LOBATTO_POINTS)
# Set the original filter to NULL
orig_filter = None
xopt = None
iter_offset = 0
for step in range(max_iterations):
# Create the TMRTopoProblem instance
nlevels = mg_levels[step]
problem = create_problem(forest, bcs, props, nlevels,
iter_offset=iter_offset, m_fixed=m_fixed,
use_compliance=True)
iter_offset += args.max_opt_iters[step]
problem.setPrefix(args.prefix)
# Check the gradient
problem.checkGradients(1e-6)
# Extract the filter to interpolate design variables
filtr = problem.getFilter()
if orig_filter is not None:
# Create one of the new design vectors
x = problem.createDesignVec()
TopOptUtils.interpolateDesignVec(orig_filter, xopt, filtr, x)
problem.setInitDesignVars(x)
# Set the new original filter
orig_filter = filtr
# Set the max number of iterations
optimization_options['tr_max_iterations'] = args.max_opt_iters[step]
# Output files
optimization_options['output_file'] = os.path.join(args.prefix,
'output_file%d.dat'%(step))
optimization_options['tr_output_file'] = os.path.join(args.prefix,
'tr_output_file%d.dat'%(step))
# Optimize the problem
opt = ParOpt.Optimizer(problem, optimization_options)
opt.optimize()
xopt, z, zw, zl, zu = opt.getOptimizedPoint()
# Refine based solely on the value of the density variable
assembler = problem.getAssembler()
forest = forest.duplicate()
# Perform refinement based on distance
dist_file = os.path.join(args.prefix, 'distance_solution%d.f5'%(step))
domain_length = np.sqrt(r*a)
refine_distance = 0.025*domain_length
TopOptUtils.targetRefine(forest, filtr, assembler, refine_distance,
interface_lev=3, interior_lev=2,
domain_length=domain_length, interface_index=-1,
interior_index=0, reverse=True, filename=dist_file)
# Repartition the mesh
forest.balance(1)
forest.repartition()
```
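The q_penalty argument above feeds TMR's StiffnessProperties, which penalizes intermediate densities so the optimizer is driven toward 0/1 designs. TMR's internal interpolation is not reproduced here; the sketch below shows the standard RAMP and SIMP forms that such penalization is modeled on (the formulas and the k0 floor are stated assumptions, not TMR's exact implementation):
```python
import numpy as np

def ramp_stiffness(x, E0, q=8.0, k0=1e-3):
    # RAMP interpolation: E(x) = E0*x/(1 + q*(1 - x)), floored at k0*E0
    # so the stiffness matrix stays non-singular as x -> 0
    return E0*np.maximum(x/(1.0 + q*(1.0 - x)), k0)

def simp_stiffness(x, E0, p=3.0, k0=1e-3):
    # SIMP interpolation: E(x) = E0*x**p, with the same floor
    return E0*np.maximum(x**p, k0)

x = np.linspace(0.0, 1.0, 6)
print(ramp_stiffness(x, 70e9))
print(simp_stiffness(x, 70e9))
```
With q = 8, a half-dense element carries only 10% of the full stiffness while still carrying 50% of the mass, which is what makes intermediate densities uneconomical for the optimizer.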
#### File: topology_optimization/lbracket/lbracket.py
```python
from mpi4py import MPI
from tmr import TMR, TopOptUtils
from paropt import ParOpt
from tacs import TACS, elements, constitutive, functions
import numpy as np
import argparse
import os
def in_domain(x, y):
"""
Check if a point x, y is within the geometry domain
"""
l = 0.1
h = 0.04
xc = x - h
yc = y - h
check = True
if (x > l) or (x < 0.0) or (y > l) or (y < 0.0):
check = False
return check
    if (xc > 0.0) and (yc > 0.0):
check = False
return check
def in_circle(x, y, x0, y0, r):
"""
    Check if a point (x, y) is in the circle centered at (x0, y0) with
    radius r
"""
dr = (x-x0)**2 + (y-y0)**2 - r**2
if dr <= 0:
check = True
else:
check = False
return check
def circle_refine(x0, r, hr, h):
"""
    Create a feature size with a circular refinement region using concentric
    circles of radii r, with target mesh size hr in each concentric circle
Args:
x0 (np.array): center of circle where refinement region should be centered
r (np.array): array of circle radii to apply refinement to
-> should be in descending order
hr (np.array): corresponding target h values for within each concentric
circle of radius r
h (float): target refinement outside the circular domain
"""
# Create a grid of points to specify target values
nx = 100
ny = 100
npts = nx*ny
xpts = np.zeros((npts, 3))
x = np.linspace(0.0, 0.1, nx)
y = np.linspace(0.0, 0.1, ny)
del_i = []
for i in range(nx):
for j in range(ny):
if in_domain(x[i], y[j]):
xpts[i*ny+j, 0] = x[i]
xpts[i*ny+j, 1] = y[j]
else:
del_i.append(i*ny+j)
# Remove the region outside the domain
xpts = np.delete(xpts, del_i, 0)
# Create the refinement array
hvals = h*np.ones(len(xpts))
for i in range(len(xpts)):
for hi, ri in zip(hr, r):
if in_circle(xpts[i, 0], xpts[i, 1], x0[0], x0[1], ri):
hvals[i] = hi
xpts = np.ascontiguousarray(xpts)
hmin = np.amin(hr)
fs = TMR.PointFeatureSize(xpts, hvals, hmin, h)
return fs
class QuadConformCreator(TMR.QuadConformTopoCreator):
"""
This class is called to create a TACSAssembler class with an underlying
filter mesh that conforms with Assembler mesh. The input to the class
consists of the boundary conditions, the forest of quadtrees for the
analysis mesh, the order of the conforming filter QuadForest mesh, and
the type of interpolant for the filter.
"""
def __init__(self, bcs, forest, order=2, interp=TMR.BERNSTEIN_POINTS,
design_vars_per_node=1, props=None):
# Store the properties
self.props = props
self.element = None
def createTopoElement(self, order, filtr):
"""
Create an element for the entire mesh
"""
# Create the constitutive object - one for the entire mesh
self.con = TMR.QuadConstitutive(props=self.props, forest=filtr)
# Create the model
self.model = elements.LinearElasticity2D(self.con)
# Set the basis functions and create the element
if order == 2:
self.basis = elements.LinearQuadBasis()
elif order == 3:
self.basis = elements.QuadraticQuadBasis()
elif order == 4:
self.basis = elements.CubicQuadBasis()
elif order == 5:
self.basis = elements.QuarticQuadBasis()
elif order == 6:
self.basis = elements.QuinticQuadBasis()
# Create the element type
self.element = elements.Element2D(self.model, self.basis)
return
def createElement(self, order, quadrant, index, filtr):
"""
Create the element for the specified quadrant
Args:
order (int): The order of the element
quadrant (TMR.Quadrant): The quadrant to be build for this element
index (list): The global numbers for the quadrant nodes
filtr (QuadForest): The QuadForest for the filter
Returns:
TACS.Element: Element to place within the Assembler
"""
if self.element is None:
self.createTopoElement(order, filtr)
return self.element
class CreatorCallback:
def __init__(self, bcs, props):
self.bcs = bcs
self.props = props
def creator_callback(self, forest):
"""
Given the forest, instantiate a creator class that will populate a
TACSAssembler object with the elements. This allocates the
QuadConformCreator class above and also returns the QuadForest object
associated with the filter. This is needed in the createTopoProblem
call.
"""
order = forest.getMeshOrder()
interp = TMR.BERNSTEIN_POINTS
dvs_per_node = self.props.getDesignVarsPerNode()
creator = QuadConformCreator(self.bcs, forest, order=order,
design_vars_per_node=dvs_per_node,
interp=interp, props=self.props)
filtr = creator.getFilter()
return creator, filtr
def create_forest(comm, depth, htarget, fs_type=None,
filename='2d-bracket-fillet.stp'):
"""
Create an initial forest for analysis and optimization
This code loads in the model, sets names, meshes the geometry and creates
a QuadForest from the mesh. The forest is populated with quadtrees with
the specified depth.
Args:
comm (MPI_Comm): MPI communicator
depth (int): Depth of the initial trees
htarget (float): Target global element mesh size
fs_type (string): feature size refinement to use ('box', or 'point')
Returns:
QuadForest: Initial forest for topology optimization
"""
# Load in the L-bracket model
geo = TMR.LoadModel(filename)
verts = geo.getVertices()
edges = geo.getEdges()
faces = geo.getFaces()
geo = TMR.Model(verts, edges, faces)
# Set the edges
edges[5].setName('fixed')
edges[1].setName('traction')
# Create the mesh
mesh = TMR.Mesh(comm, geo)
if fs_type == 'box':
hmin = htarget/4.0
hmax = htarget
pt1 = [0.03, 0.03, -1]
pt2 = [0.05, 0.05, 1]
box = TMR.BoxFeatureSize(pt1, pt2, hmin, hmax)
box.addBox(pt1, pt2, hmin)
# Set the meshing options
opts = TMR.MeshOptions()
# Create the surface mesh
mesh.mesh(opts=opts, fs=box)
elif fs_type == 'point':
x0 = np.array([0.04, 0.04])
ncircles = 10
r = np.linspace(0.04, 0.01, ncircles)
hr = htarget*np.linspace(1.0, 0.25, ncircles)
#np.array([htarget/2.0, htarget/4.0])
circle = circle_refine(x0, r, hr, htarget)
# Set the meshing options
opts = TMR.MeshOptions()
# Create the surface mesh
mesh.mesh(opts=opts, fs=circle)
else:
# Set the meshing options
opts = TMR.MeshOptions()
# Create the surface mesh
mesh.mesh(htarget, opts=opts)
# Create a model from the mesh
model = mesh.createModelFromMesh()
# Create the corresponding mesh topology from the mesh-model
topo = TMR.Topology(comm, model)
# Create the quad forest and set the topology of the forest
forest = TMR.QuadForest(comm)
forest.setTopology(topo)
# Set parameters for later usage
forest.createTrees(depth)
return forest
class MFilterCreator:
def __init__(self, r0_frac, N, a=0.1):
self.a = a
self.r0_frac = r0_frac
self.N = N
def filter_callback(self, assemblers, filters):
"""
Create and initialize a filter with the specified parameters
"""
# Find the characteristic length of the domain and set the filter length scale
r0 = self.r0_frac*self.a
mfilter = TopOptUtils.Mfilter(self.N, assemblers, filters, dim=2, r=r0)
mfilter.initialize()
return mfilter
def create_problem(forest, bcs, props, nlevels,
r0_frac=0.05, N=20, iter_offset=0, m_fixed=0.0):
"""
Create the TMRTopoProblem object and set up the topology optimization problem.
This code is given the forest, boundary conditions, material properties and
the number of multigrid levels. Based on this info, it creates the TMRTopoProblem
and sets up the mass-constrained compliance minimization problem. Before
the problem class is returned it is initialized so that it can be used for
optimization.
Args:
forest (OctForest): Forest object
bcs (BoundaryConditions): Boundary condition object
props (StiffnessProperties): Material properties object
nlevels (int): number of multigrid levels
r0_frac (float): Fraction of the characteristic domain length
N (int): Number of iterations of the discrete filter
iter_offset (int): Iteration offset counter
m_fixed (float): Fixed mass fraction
Returns:
TopoProblem: Topology optimization problem instance
"""
# Allocate the creator callback function
obj = CreatorCallback(bcs, props)
# Create a discrete M-filter
mfilter = MFilterCreator(r0_frac, N)
filter_type = mfilter.filter_callback
# Create the problem and filter object
problem = TopOptUtils.createTopoProblem(forest, obj.creator_callback, filter_type,
nlevels=nlevels, lowest_order=2,
use_galerkin=True,
design_vars_per_node=1)
# Get the assembler object we just created
assembler = problem.getAssembler()
# Get the basis object from one of the elements
elems = assembler.getElements()
basis = elems[0].getElementBasis()
    # Create the traction objects that will be used later
Ty = -2.5e6 # Traction force component in the y-direction
vpn = elems[0].getVarsPerNode()
trac = [0.0, Ty]
tractions = []
for findex in range(4):
tractions.append(elements.Traction2D(vpn, findex, basis, trac))
    # Allocate the traction boundary condition
force1 = TopOptUtils.computeTractionLoad('traction', forest, assembler,
tractions)
# Set the load case
problem.setLoadCases([force1])
# Set the constraint functions
funcs = [functions.StructuralMass(assembler)]
# Set the mass constraint
# (m_fixed - m(x))/m_fixed >= 0.0
problem.addConstraints(0, funcs, [-m_fixed], [-1.0/m_fixed])
# Set the values of the objective array
obj_array = [ 1.0 ]
ksfail = functions.KSFailure(assembler, 100.0)
ksfail.setKSFailureType('discrete')
problem.setObjective(obj_array, [ksfail])
# Initialize the problem and set the prefix
problem.initialize()
return problem
class OutputCallback:
def __init__(self, prefix, assembler, iter_offset=0, freq=10):
self.prefix = prefix
self.freq = freq
self.iter_offset = iter_offset
# Set the output file name
flag = (TACS.OUTPUT_CONNECTIVITY |
TACS.OUTPUT_NODES |
TACS.OUTPUT_DISPLACEMENTS |
TACS.OUTPUT_STRAINS |
TACS.OUTPUT_EXTRAS)
self.f5 = TACS.ToFH5(assembler, TACS.PLANE_STRESS_ELEMENT, flag)
def write_output(self, prefix, itr, oct_forest, quad_forest, x):
if itr % self.freq == 0:
self.f5.writeToFile(os.path.join(self.prefix,
'output%d.f5'%(itr + self.iter_offset)))
return
# Set the optimization parameters
optimization_options = {
# Set the algorithm to use
'algorithm': 'tr',
# Use a sequential linear trust-region method
'qn_type': 'none',
'sequential_linear_method': True,
# Use a BFGS method with skipped updates
# 'qn_type': 'bfgs',
# 'qn_subspace_size': 0,
# Parameters for the trust region method
'tr_init_size': 0.01,
'tr_max_size': 0.05,
'tr_min_size': 1e-5,
'tr_eta': 0.1,
'penalty_gamma': 5.0,
'tr_penalty_gamma_max': 5.0,
'tr_write_output_frequency': 1,
'tr_infeas_tol': 1e-5,
'tr_l1_tol': 1e-3,
'tr_linfty_tol': 0.0, # Don't use the l-infinity norm in the stopping criterion
'tr_adaptive_gamma_update': False,
'tr_steering_barrier_strategy': 'default',
'tr_steering_starting_point_strategy': 'default',
# Parameters for the interior point method (used to solve the
# trust region subproblem)
'abs_res_tol': 1e-10,
'max_major_iters': 100,
'norm_type': 'l1',
'init_barrier_param': 10.0,
'use_line_search': False,
'barrier_strategy': 'mehrotra_predictor_corrector',
'starting_point_strategy': 'affine_step'}
if __name__ == '__main__':
# Create an argument parser to read in arguments from the command line
p = argparse.ArgumentParser()
p.add_argument('--prefix', type=str, default='./results')
p.add_argument('--vol_frac', type=float, default=0.3)
p.add_argument('--htarget', type=float, default=2.5e-3)
p.add_argument('--max_iters', type=int, default=1)
p.add_argument('--max_opt_iters', type=int, default=300)
p.add_argument('--init_depth', type=int, default=1)
p.add_argument('--mg_levels', type=int, default=3)
p.add_argument('--order', type=int, default=2)
p.add_argument('--q_penalty', type=float, default=8.0)
p.add_argument('--N', type=int, default=10)
p.add_argument('--r0_frac', type=float, default=0.05)
p.add_argument('--use_project', action='store_true', default=False)
p.add_argument('--use_simp', action='store_true', default=False)
p.add_argument('--fs_type', type=str, default='None',
help='feature size refinement type: point, box, or None')
args = p.parse_args()
# Set the communicator
comm = MPI.COMM_WORLD
# Print out all of the arguments to the command line
if comm.rank == 0:
for arg in vars(args):
print('%-20s'%(arg), getattr(args, arg))
# Ensure that the prefix directory exists
if comm.rank == 0 and not os.path.isdir(args.prefix):
os.mkdir(args.prefix)
# Set a barrier here
comm.Barrier()
# Create the first material properties object
rho = 2600.0
E = 70e9
nu = 0.3
ys = 100e6
mat = constitutive.MaterialProperties(rho=rho, E=E, nu=nu, ys=ys)
# Set the fixed mass
a = 0.1
b = (2.0/5.0)*a
area = a**2 - (a - b)**2
domain_length = a
full_mass = area*rho
m_fixed = args.vol_frac*full_mass
# Create the stiffness properties object
penalty_type = 'RAMP'
if args.use_simp:
penalty_type = 'SIMP'
props = TMR.StiffnessProperties(mat, q=args.q_penalty, qcond=args.q_penalty,
eps=0.05, k0=1e-6, penalty_type=penalty_type,
beta=10.0, use_project=args.use_project)
# Set the boundary conditions for the problem
bcs = TMR.BoundaryConditions()
bcs.addBoundaryCondition('fixed', [0, 1], [0.0, 0.0])
# Create the initial forest
forest = create_forest(comm, args.init_depth, args.htarget,
fs_type=args.fs_type)
forest.writeToVTK(os.path.join(args.prefix, 'forest.vtk'))
forest.setMeshOrder(args.order, TMR.GAUSS_LOBATTO_POINTS)
# Set the original filter to NULL
orig_filter = None
xopt = None
iter_offset = 0
max_iterations = args.max_iters
for step in range(max_iterations):
# Create the TMRTopoProblem instance
mg_levels = args.mg_levels
if step > 0:
mg_levels += 1
problem = create_problem(forest, bcs, props, mg_levels, m_fixed=m_fixed,
r0_frac=args.r0_frac, N=args.N,
iter_offset=iter_offset)
# Get the assembler object
assembler = problem.getAssembler()
# Set the callback for generating output
cb = OutputCallback(args.prefix, assembler, iter_offset=iter_offset)
problem.setOutputCallback(cb.write_output)
# Keep counting the total number of iterations
iter_offset += args.max_opt_iters
problem.setPrefix(args.prefix)
# Check the gradient
problem.checkGradients(1e-6)
# Test the element implementation
if comm.rank == 0:
assembler.testElement(0, 2)
# Extract the filter to interpolate design variables
filtr = problem.getFilter()
if orig_filter is not None:
# Create one of the new design vectors
x = problem.createDesignVec()
TopOptUtils.interpolateDesignVec(orig_filter, xopt, filtr, x)
problem.setInitDesignVars(x)
# Set the new original filter
orig_filter = filtr
# Set parameters
optimization_options['tr_max_iterations'] = args.max_opt_iters
optimization_options['output_file'] = os.path.join(args.prefix,
'output_file%d.dat'%(step))
optimization_options['tr_output_file'] = os.path.join(args.prefix,
'tr_output_file%d.dat'%(step))
# Optimize the problem
opt = ParOpt.Optimizer(problem, optimization_options)
opt.optimize()
xopt, z, zw, zl, zu = opt.getOptimizedPoint()
# Refine based solely on the value of the density variable
assembler = problem.getAssembler()
forest = forest.duplicate()
# Output the original design variables before filtering
rho_vec = assembler.createDesignVec()
assembler.getDesignVars(rho_vec)
x_vec = TMR.convertPVecToVec(xopt)
assembler.setDesignVars(x_vec)
# visualize
flag = (TACS.OUTPUT_CONNECTIVITY |
TACS.OUTPUT_NODES |
TACS.OUTPUT_EXTRAS)
f5 = TACS.ToFH5(assembler, TACS.PLANE_STRESS_ELEMENT, flag)
f5.writeToFile(os.path.join(args.prefix, 'dv_output%d.f5'%(step)))
# Set the tacs design vars back to the interpolated densities
assembler.setDesignVars(rho_vec)
# Perform refinement based on distance
dist_file = os.path.join(args.prefix, 'distance_solution%d.f5'%(step))
refine_distance = 0.025*domain_length
# TopOptUtils.targetRefine(forest, filtr, assembler, refine_distance,
# interface_lev=args.init_depth+1, interior_lev=args.init_depth,
# domain_length=domain_length, filename=dist_file)
TopOptUtils.approxDistanceRefine(forest, filtr, assembler, refine_distance,
domain_length=domain_length, filename=dist_file)
# Repartition the mesh
forest.balance(1)
forest.repartition()
```
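The 'point' feature-size option above reduces to tabulating a target element size over a background grid and discarding points outside the L-shaped domain. The standalone sketch below reproduces that logic with plain numpy so it runs without TMR (the grid resolution, radii, and target sizes mirror the defaults above; TMR.PointFeatureSize itself is deliberately omitted):
```python
import numpy as np

def in_lbracket(x, y, l=0.1, h=0.04):
    # The L-bracket: [0, l]^2 minus the upper-right (l-h) x (l-h) block
    if x < 0.0 or x > l or y < 0.0 or y > l:
        return False
    return not (x > h and y > h)

x0 = (0.04, 0.04)                      # re-entrant corner
radii = np.linspace(0.04, 0.01, 10)    # descending, so the finest wins last
htarget = 2.5e-3
hr = htarget*np.linspace(1.0, 0.25, 10)

pts, hvals = [], []
for x in np.linspace(0.0, 0.1, 100):
    for y in np.linspace(0.0, 0.1, 100):
        if in_lbracket(x, y):
            hval = htarget
            for hi, ri in zip(hr, radii):
                if (x - x0[0])**2 + (y - x0[1])**2 <= ri**2:
                    hval = hi
            pts.append((x, y))
            hvals.append(hval)

print('in-domain points: %d, finest target size: %g'%(len(pts), min(hvals)))
```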
#### File: tmr/tmr/bowyer_watson.py
```python
import numpy as np
import matplotlib.pyplot as plt
import sys
import cProfile
class quadnode:
def __init__(self, low, high, u=0, v=0, level=0):
# Set the coordinate direction
self.low = low
self.high = high
# Record the lower right-hand parametric point
self.u = u
self.v = v
# Set the level
self.level = level
# Set the maximum quad length and the edge length
        hmax = 1 << 30
        self.h = 1 << (30 - (self.level + 1))
# Set the value of alpha
ax = (1.0*(self.u + self.h))/hmax
ay = (1.0*(self.v + self.h))/hmax
# Set the new point location
self.x = (1.0 - ax)*low[0] + ax*high[0]
self.y = (1.0 - ay)*low[1] + ay*high[1]
# The left and right nodes
self.low_left = None
self.low_right = None
self.up_left = None
self.up_right = None
# Keep track of the points
self.pts = {}
return
def add_point(self, num, pt):
'''Add the point to the left or right node'''
if self.low_left is not None:
if pt[0] < self.x and pt[1] < self.y:
self.low_left.add_point(num, pt)
elif pt[0] < self.x:
self.up_left.add_point(num, pt)
elif pt[1] < self.y:
self.low_right.add_point(num, pt)
else:
self.up_right.add_point(num, pt)
return
# Else add the point
self.pts[num] = np.array(pt)
# Check if we need to split
if len(self.pts) > 10:
# Current (u, v) location for the lower part of the mesh
u = self.u
v = self.v
# Side length for the mesh
h = self.h
self.low_left = quadnode(self.low, self.high,
u=u, v=v, level=self.level+1)
self.low_right = quadnode(self.low, self.high,
u=u+h, v=v, level=self.level+1)
self.up_left = quadnode(self.low, self.high,
u=u, v=v+h, level=self.level+1)
self.up_right = quadnode(self.low, self.high,
u=u+h, v=v+h, level=self.level+1)
            # Redistribute the points to the four child nodes; copy the
            # keys since the dictionary is modified during iteration
            for key in list(self.pts.keys()):
                item = self.pts.pop(key)
                self.add_point(key, item)
return
def delete_point(self, num, pt):
'''Free the point - the number is matched, not the point value'''
if self.low_left is not None:
if pt[0] < self.x and pt[1] < self.y:
return self.low_left.delete_point(num, pt)
elif pt[0] < self.x:
return self.up_left.delete_point(num, pt)
elif pt[1] < self.y:
return self.low_right.delete_point(num, pt)
else:
return self.up_right.delete_point(num, pt)
# Else add the point
if num in self.pts:
item = self.pts.pop(num)
return True
return False
def find_closest(self, pt):
'''Find the closest point'''
if self.low_left is not None:
if pt[0] < self.x and pt[1] < self.y:
num, d = self.low_left.find_closest(pt)
if self.x - pt[0] <= d:
num2, d2 = self.low_right.find_closest(pt)
if d2 < d: num, d = num2, d2
if self.y - pt[1] <= d:
num2, d2 = self.up_left.find_closest(pt)
if d2 < d: num, d = num2, d2
if self.x - pt[0] <= d or self.y - pt[1] <= d:
num2, d2 = self.up_right.find_closest(pt)
if d2 < d: num, d = num2, d2
elif pt[0] < self.x:
num, d = self.up_left.find_closest(pt)
if self.x - pt[0] <= d:
num2, d2 = self.up_right.find_closest(pt)
if d2 < d: num, d = num2, d2
if pt[1] - self.y <= d:
num2, d2 = self.low_left.find_closest(pt)
if d2 < d: num, d = num2, d2
if self.x - pt[0] <= d or pt[1] - self.y <= d:
num2, d2 = self.low_right.find_closest(pt)
if d2 < d: num, d = num2, d2
elif pt[1] < self.y:
num, d = self.low_right.find_closest(pt)
if pt[0] - self.x <= d:
num2, d2 = self.low_left.find_closest(pt)
if d2 < d: num, d = num2, d2
if self.y - pt[1] <= d:
num2, d2 = self.up_right.find_closest(pt)
if d2 < d: num, d = num2, d2
if pt[0] - self.x <= d or self.y - pt[1] <= d:
num2, d2 = self.up_left.find_closest(pt)
if d2 < d: num, d = num2, d2
else:
num, d = self.up_right.find_closest(pt)
if pt[0] - self.x <= d:
num2, d2 = self.up_left.find_closest(pt)
if d2 < d: num, d = num2, d2
if pt[1] - self.y <= d:
num2, d2 = self.low_right.find_closest(pt)
if d2 < d: num, d = num2, d2
if pt[0] - self.x <= d or pt[1] - self.y <= d:
num2, d2 = self.low_left.find_closest(pt)
if d2 < d: num, d = num2, d2
return num, d
# This is a leaf, search the points
dmin = 1e40
num = -1
for key in self.pts:
d = np.sqrt(np.dot(self.pts[key] - pt, self.pts[key] - pt))
if d < dmin:
dmin = d
num = key
return num, dmin
class triangulate:
def __init__(self, pts, segs, hole=None, maxd=2):
self.tri_key = 0
self.tris = {}
self.edge_to_tris = {}
# What triangles have we deleted lately?
self.deleted_tris = []
# Keep track of one adjacent triangle to each node
self.adjacent_tris = {}
# Set the initial points
self.pts = [np.array((-maxd, -maxd)),
np.array((maxd, -maxd)),
np.array((maxd, maxd)),
np.array((-maxd, maxd))]
# Add the initial triangles
self.add_triangle(0, 1, 3)
self.add_triangle(3, 1, 2)
# Create the quadtree root node
self.root = quadnode([-maxd, -maxd], [maxd, maxd])
        for i in range(len(self.pts)):
            self.root.add_point(i, self.pts[i])
# If there is a hole, add it
offset = 4
self.hole_number = -1
if hole is not None:
self.hole_number = len(self.pts)
offset = 5
# Set the edges that shall not be crossed. These must be initialized
# before we can call add_vertex()
self.pslg_edges = []
for s in segs:
self.pslg_edges.append((s[0]+offset,s[1]+offset))
self.pslg_edges.append((s[1]+offset,s[0]+offset))
if hole is not None:
self.add_vertex(np.array(hole))
        # Add the input vertices to the triangulation
for pt in pts:
self.add_vertex(np.array(pt))
# Remove points from the list
        for i in range(offset):
            self.root.delete_point(i, self.pts[i])
        # Clean up by removing the triangles that touch the initial
        # bounding-box vertices (copy values() since triangles are
        # deleted during the iteration)
        for t in list(self.tris.values()):
            if 0 in t or 1 in t or 2 in t or 3 in t:
                self.delete_triangle(t[0], t[1], t[2])
        # Remove all of the holes
        for t in list(self.tris.values()):
            if self.hole_number in t:
                self.remove_hole(t[0], t[1], t[2])
return
def remove_hole(self, u, v, w):
'''Remove the specified hole'''
self.delete_triangle(u, v, w)
if (u, v) not in self.pslg_edges:
x = self.adjacent(v, u)
if x is not None:
self.remove_hole(v, u, x)
if (v, w) not in self.pslg_edges:
x = self.adjacent(w, v)
if x is not None:
self.remove_hole(w, v, x)
if (w, u) not in self.pslg_edges:
x = self.adjacent(u, w)
if x is not None:
self.remove_hole(u, w, x)
return
def add_triangle(self, u, v, w):
'''Add the triangle uvw to the triangle list'''
key = 1*self.tri_key
        if (u,v) in self.edge_to_tris:
            raise ValueError('Edge 1 (%d, %d) already exists for triangle (%d, %d, %d)'%(
                u, v, u, v, w))
        else:
            self.edge_to_tris[(u,v)] = key
        if (v,w) in self.edge_to_tris:
            raise ValueError('Edge 2 (%d, %d) already exists for triangle (%d, %d, %d)'%(
                v, w, u, v, w))
        else:
            self.edge_to_tris[(v,w)] = key
        if (w,u) in self.edge_to_tris:
            raise ValueError('Edge 3 (%d, %d) already exists for triangle (%d, %d, %d)'%(
                w, u, u, v, w))
        else:
            self.edge_to_tris[(w,u)] = key
# Set the adjacent triangle to this node
self.adjacent_tris[u] = key
self.adjacent_tris[v] = key
self.adjacent_tris[w] = key
# Add the triangle itself
self.tris[key] = (u,v,w)
self.tri_key += 1
return
def delete_triangle(self, u, v, w):
'''Delete the enclosing triangle'''
if (u,v) in self.edge_to_tris:
self.edge_to_tris.pop((u,v))
self.edge_to_tris.pop((v,w))
key = self.edge_to_tris.pop((w,u))
self.tris.pop(key)
self.deleted_tris.append(key)
return
def adjacent(self, u, v):
'''Get the triangle that completes the given edge'''
if (u,v) in self.edge_to_tris:
t = self.tris[self.edge_to_tris[(u,v)]]
if t[0] == u and t[1] == v:
return t[2]
elif t[1] == u and t[2] == v:
return t[0]
elif t[2] == u and t[0] == v:
return t[1]
return None
def get_triangle(self, u, v):
'''Get the unique triangle index associated with the edge'''
if (u,v) in self.edge_to_tris:
return self.edge_to_tris[(u,v)]
return None
def incircle(self, pt, t):
        '''Check whether the point lies inside the circumcircle of triangle t'''
a = [(self.pts[v][0] - pt[0]) for v in t]
b = [(self.pts[v][1] - pt[1]) for v in t]
c = [a[i]**2 + b[i]**2 for i in range(3)]
A = np.vstack((a, b, c)).T # The 3x3 matrix to check
return np.linalg.det(A) >= 0.0
def orient2d(self, a, b, pt):
'''Check the relative orientation of the points a, b, and pt'''
A = np.array([
[self.pts[a][0] - pt[0], self.pts[a][1] - pt[1]],
[self.pts[b][0] - pt[0], self.pts[b][1] - pt[1]]])
# This is NOT robust to precision errors
return A[0,0]*A[1,1] - A[0,1]*A[1,0] >= 0.0
def enclosed(self, u, v, w, pt):
'''Does this triangle enclose this point?'''
if (self.orient2d(u, v, pt) and
self.orient2d(v, w, pt) and
self.orient2d(w, u, pt)):
return True
return False
def find_encircled(self, pt):
'''Find the first triangle that encloses the point'''
for t in self.tris.values():
if self.incircle(pt, t):
return t
return None
def find_enclosing(self, pt):
'''Find the first triangle that encloses the point'''
        # Find the closest node to the given point
num, dist = self.root.find_closest(pt)
# Search for nodes that are adjacent to this guy
tri_num = self.adjacent_tris[num]
# Access the nodes in the triangle
t = self.tris[tri_num]
# Find the orientation of the triangle relative to the
# node number 'num'
if t[0] == num:
u, v, w = t
elif t[1] == num:
w, u, v = t
else:
v, w, u = t
winit = w
# Search the triangles adjacent to this one to find the
# enclosed point
while True:
if self.enclosed(u, v, w, pt):
return u, v, w
# Otherwise, find the next triangle that has u in it
x = self.adjacent(u, w)
if x is None:
break
# Update the v's and w's to march over the triangle
v = w
w = x
if w == winit:
break
# Walk the mesh from here...
for t in self.tris.values():
if self.enclosed(t[0], t[1], t[2], pt):
return t
return None
def dig_cavity(self, u, v, w):
'''
u is the index of a new point to be inserted. Is the
oriented triangle (u,v,w) Delaunay or not?
'''
if (w,v) in self.pslg_edges:
self.add_triangle(u, v, w)
return
# Get the adjacent node
x = self.adjacent(w, v)
if x is not None:
if self.incircle(self.pts[u], (w, v, x)):
self.delete_triangle(w, v, x)
self.dig_cavity(u, v, x)
self.dig_cavity(u, x, w)
return
self.add_triangle(u, v, w)
return
def add_vertex(self, pt):
'''
        Add the vertex to the underlying Delaunay triangulation
'''
# Find the enclosing triangle
t = self.find_enclosing(pt)
# Add the point to the quadtree and point list
u = len(self.pts) # Pt index
self.root.add_point(u, pt)
self.pts.append(pt)
if t is not None:
v, w, x = t
self.delete_triangle(v, w, x)
self.dig_cavity(u, v, w)
self.dig_cavity(u, w, x)
self.dig_cavity(u, x, v)
return
def add_vertex_frontal(self, pt, t):
'''Add the vertex contained within the specified triangle'''
u = len(self.pts) # Pt index
self.root.add_point(u, pt)
self.pts.append(pt)
if t is not None:
v, w, x = t
self.delete_triangle(v, w, x)
self.dig_cavity(u, v, w)
self.dig_cavity(u, w, x)
self.dig_cavity(u, x, v)
return
    def circumcircle(self, t):
        '''Estimate the triangle size as its maximum edge length (a cheap
        stand-in for the true circumcircle radius)'''
p = [self.pts[v] for v in t]
r1 = np.sqrt((p[1][0] - p[0][0])**2 + (p[1][1] - p[0][1])**2)
r2 = np.sqrt((p[2][0] - p[1][0])**2 + (p[2][1] - p[1][1])**2)
r3 = np.sqrt((p[2][0] - p[0][0])**2 + (p[2][1] - p[0][1])**2)
return max(r1, r2, r3)
def computeIntersection(self, m, e, u, v, w):
'''
Compute the distance to the intersection of the line m + alpha*e with the
two lines from u to w and v to w
'''
# m + alpha*e = pts[u] + beta*(pts[w] - pts[u])
# => alpha*e + beta*(pts[u] - pts[w]) = pts[u] - m
a2 = [self.pts[u][i] - self.pts[w][i] for i in range(2)]
# Check if the line is orthogonal to this direction
if e[0]*a2[1] - e[1]*a2[0] != 0.0:
b = [self.pts[u][i] - m[i] for i in range(2)]
A = np.vstack((e, a2)).T
ans = np.linalg.solve(A, b)
# If beta is on the interval [0,1], alpha is the distance
if ans[1] >= 0.0 and ans[1] <= 1.0:
return ans[0]
        # Otherwise, check for an intersection with the edge from v to w
b = [self.pts[v][i] - m[i] for i in range(2)]
a2 = [self.pts[v][i] - self.pts[w][i] for i in range(2)]
A = np.vstack((e, a2)).T
ans = np.linalg.solve(A, b)
# If beta is on the interval [0,1], alpha is the distance
if ans[1] >= 0.0 and ans[1] <= 1.0:
return ans[0]
return -1.0
def frontal(self, h, plot=False, freq=50):
'''Refine the triangles using the frontal method'''
# The length of the edge of the triangle
de = 0.5*np.sqrt(3.0)*h
# Add all of the triangles to the active set
status = {}
circumcircles = {}
for key in self.tris:
status[key] = 'waiting'
# Check whether the triangle should be labeled as active
t = self.tris[key]
u, v, w = t
for e in [(u,v), (v,w), (w,u)]:
if e in self.pslg_edges:
status[key] = 'active'
# Compute the circumcircles
for key in self.tris:
circumcircles[key] = self.circumcircle(self.tris[key])
# Compute the circumcircle radii of all triangles
itr = 0
while 'active' in status.values():
if itr % freq == 0:
                print('iteration = %d'%(itr))
if plot:
self.status = status
self.plot()
plt.show()
itr += 1
            # Find the worst triangle
rmax = 0.0
tri = -1
# If we maintained a sorted list of the worst offenders, this
# could be faster
for key in circumcircles:
if status[key] == 'active' and circumcircles[key] > rmax:
rmax = circumcircles[key]
tri = key
# Now insert a new point based on the Voronoi criteria
# Determine the triangle's accepted or boundary edge
t = self.tris[tri]
# Check if we are along an edge of the PSLG
edge = None
u, v, w = t
for e in [(u,v), (v,w), (w,u)]:
if e in self.pslg_edges:
edge = e
# Check whether one of the adjacent edges is done
if edge is None:
for e in [(u,v), (v,w), (w,u)]:
index = self.get_triangle(e[1], e[0])
if index is not None and status[index] == 'done':
edge = e
break
# Compute the location of the new point
# | i j k |
# | dx dy 0 | = i*dy - j*dx
# | 0 0 1 |
u, v = edge
w = self.adjacent(u, v)
m = 0.5*np.array(
[self.pts[u][0] + self.pts[v][0],
self.pts[u][1] + self.pts[v][1]])
e = -np.array(
[self.pts[v][1] - self.pts[u][1],
self.pts[u][0] - self.pts[v][0]])
# Compute half the distance between the u and v points
p = 0.5*np.sqrt(np.sum(e**2))
e = 0.5*e/p
# Try the size-optimal point first
pt = m + de*e
            # Find the enclosing triangle for the new point
if not self.enclosed(t[0], t[1], t[2], pt):
t = self.find_enclosing(pt)
if t is None:
# Pick a point that is actually in the current triangle
q = self.computeIntersection(m, e, u, v, w)
rho = 0.5*q
pt = m + rho*e
t = self.tris[tri]
# Use the edge for this triangle to determine where to insert
# the new point
old_tri = 1*self.tri_key
self.deleted_tris = []
self.add_vertex_frontal(pt, t)
new_tri = self.tri_key
# Free the deleted triangles
for key in self.deleted_tris:
circumcircles.pop(key)
status.pop(key)
# Compute the circumcircles of the new triangles and check
# whether they belong in the done category or not...
for key in range(old_tri, new_tri):
circumcircles[key] = self.circumcircle(self.tris[key])
if circumcircles[key] <= 1.5*h:
status[key] = 'done'
else:
status[key] = 'waiting'
# Mark the triangle with the edge that was just computed
# as being done, regardless of whether it is or not...
# Otherwise the same extrapolation point will be used which
# will duplicate points within the domain leading to chaos!
if (u,v) in self.edge_to_tris:
tri = self.edge_to_tris[(u,v)]
status[tri] = 'done'
# Label the new triangles with active status
for key in range(old_tri, new_tri):
if status[key] == 'done':
continue
# Extract the triangle
t = self.tris[key]
u, v, w = t
# Check the adjacent triangles
for edge in [(u,v), (v,w), (w,u)]:
index = self.get_triangle(edge[1], edge[0])
if index is not None and status[index] == 'done':
status[key] = 'active'
break
if edge in self.pslg_edges:
status[key] = 'active'
break
self.status = status
return
def plot(self):
        '''Plot the Delaunay triangulation'''
plt.figure()
for edge in self.pslg_edges:
x = [self.pts[v][0] for v in edge]
y = [self.pts[v][1] for v in edge]
plt.plot(x, y, 'b', linewidth=2)
for key in self.tris:
t = self.tris[key]
t2 = [v for v in t]
t2.append(t[0])
x = [self.pts[v][0] for v in t2]
y = [self.pts[v][1] for v in t2]
if hasattr(self, 'status'):
if key in self.status.keys():
if self.status[key] == 'active':
plt.fill(x, y, 'b', alpha=0.2, edgecolor='r')
elif self.status[key] == 'done':
plt.fill(x, y, 'r', alpha=0.2, edgecolor='r')
elif self.status[key] == 'waiting':
plt.fill(x, y, 'g', alpha=0.2, edgecolor='r')
else:
plt.fill(x, y, 'y', alpha=0.2, edgecolor='k')
else:
plt.fill(x, y, 'g', alpha=0.2, edgecolor='k')
for pt in self.pts[4:]:
plt.plot(pt[0], pt[1], 'ro')
return
# Create a list of points that forms the boundary of the domain
h = 0.75
r1 = 1.5
r2 = 0.75
r3 = 0.2
hole = None
# The list of points and segments
pts = []
segs = []
if 'circle' in sys.argv:
# Create the points for the outer circle
npts = (int)((2*r1*np.pi)/h)
h = 2*np.pi*r1/npts
u = np.linspace(0, 2*np.pi, npts+1)[:-1]
    for i in range(npts):
pts.append([r1*np.cos(u[i]), r1*np.sin(u[i])])
if i == npts-1:
segs.append([i, 0])
else:
segs.append([i, i+1])
elif 'annulus' in sys.argv:
hole = [0, 0]
# Create the points for the outer circle
npts = (int)((2*r1*np.pi)/h)
h = 2*np.pi*r1/npts
u = np.linspace(0, 2*np.pi, npts+1)[:-1]
    for i in range(npts):
pts.append([r1*np.cos(u[i]), r1*np.sin(u[i])])
if i == npts-1:
segs.append([i, 0])
else:
segs.append([i, i+1])
offset = npts
# Create the points for the inner circle
npts = (int)((2*r2*np.pi)/h)
u = np.linspace(0, 2*np.pi, npts+1)[:-1]
    for i in range(npts):
pts.append([r2*np.cos(u[-1-i]), r2*np.sin(u[-1-i])])
if i == npts-1:
segs.append([offset+i, offset])
else:
segs.append([offset+i, offset+i+1])
elif 'triangle' in sys.argv:
npts = (int)(2*r1/h)
u = np.linspace(-r1, r1, npts+1)
    for i in range(npts):
        pts.append([u[i], -r1])
    for i in range(npts):
        pts.append([r1, u[i]])
    for i in range(npts):
        v = u[-1-i]
        x = v + 0.1*(v-r1)*(v+r1)
        y = v - 0.1*(v-r1)*(v+r1)
        pts.append([x, y])
    for i in range(3*npts):
segs.append([i, i+1])
segs[-1][1] = 0
else:
npts = (int)(2*r1/h)
u = np.linspace(-r1, r1, npts+1)
    for i in range(npts):
        pts.append([u[i], -r1])
    for i in range(npts):
        pts.append([r1, u[i]])
    for i in range(npts):
        pts.append([u[-1-i], r1])
    for i in range(npts):
        pts.append([-r1, u[-1-i]])
    for i in range(4*npts):
segs.append([i, i+1])
segs[-1][1] = 0
hole = [0, 0]
offset = 4*npts
# Create the points for the inner circle
npts = (int)((2*r2*np.pi)/h)
u = np.linspace(0, 2*np.pi, npts+1)[:-1]
    for i in range(npts):
pts.append([r2*np.cos(u[-1-i]), r2*np.sin(u[-1-i])])
if i == npts-1:
segs.append([offset+i, offset])
else:
segs.append([offset+i, offset+i+1])
# Create the triangularization
tri = triangulate(pts, segs, hole=hole)
# cProfile.run('tri.frontal(h)')
tri.frontal(h)
tri.plot()
plt.savefig('bowyer_watson.pdf')
plt.show()
```
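The incircle test at the heart of the Bowyer-Watson routine above is the classical 3x3 determinant predicate. The self-contained check below restates it and verifies it against a triangle whose circumcircle is known; like the original, it is plain floating point and not robust to near-degenerate input:
```python
import numpy as np

def incircle(pa, pb, pc, pd):
    # For a counterclockwise triangle (pa, pb, pc), the determinant is
    # positive exactly when pd lies inside the circumcircle
    rows = []
    for p in (pa, pb, pc):
        ax, ay = p[0] - pd[0], p[1] - pd[1]
        rows.append([ax, ay, ax*ax + ay*ay])
    return np.linalg.det(np.array(rows)) > 0.0

# Right triangle with circumcenter (0.5, 0.5) and radius sqrt(2)/2
tri = [(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]
print(incircle(*tri, (0.5, 0.5)))   # True: the circumcenter is inside
print(incircle(*tri, (2.0, 2.0)))   # False: well outside
```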
#### File: tmr/tmr/TopOptUtils.py
```python
from mpi4py import MPI
from tacs import TACS, elements
from tmr import TMR
from paropt import ParOpt
import numpy as np
from six import iteritems
try:
from scipy.optimize import minimize
except ImportError:
minimize = None
def createTopoProblem(forest, callback, filter_type, nlevels=2,
repartition=True, design_vars_per_node=1,
r0=0.05, N=10, lowest_order=2,
ordering=TACS.MULTICOLOR_ORDER,
use_galerkin=False,
scale_coordinate_factor=1.0):
"""
Create a topology optimization problem instance and a hierarchy of meshes.
This code takes in the OctForest or QuadForest on the finest mesh level
and creates a series of coarser meshes for analysis and optimization.
The discretization at each level is created via a callback function that
generates the appropriate TACSCreator object and its associated filter (the
QuadForest or OctForest on which the design parametrization is defined.)
The code then creates a TMRTopoFilter class which stores information about
the design parametrization and hierarchy. It creates a multigrid object and
finally a TMRTopoProblem instance for optimization.
The callback function takes in a forest object, corresponding to the finite-
element discretization and returns a creator object and a filter object in
the following form:
creator, filter = callback(forest)
Args:
callback: A callback function that takes in the forest and
returns the filter and the associated creator class
filter_type (str): Type of filter to create
forest (TMROctForest or TMRQuadForest): Forest type
repartition (bool): Repartition the mesh
design_vars_per_node (int): number of design variables for each node
r0 (float): Helmholtz/matrix filter radius
N (int): Matrix filter approximation parameter
lowest_order (int): Lowest order mesh to create
ordering: TACS Assembler ordering type
use_galerkin: Use Galerkin projection to obtain coarse grid operators
scale_coordinate_factor (float): Scale all coordinates by this factor
Returns:
problem (TopoProblem): The allocated topology optimization problem
"""
# Store data
forests = []
filters = []
assemblers = []
# Balance the forest and repartition across processors
forest.balance(1)
if repartition:
forest.repartition()
# Create the forest object
creator, filtr = callback(forest)
forests.append(forest)
filters.append(filtr)
assemblers.append(creator.createTACS(forest, ordering))
for i in range(nlevels-1):
order = forests[-1].getMeshOrder()
interp = forests[-1].getInterpType()
if order > lowest_order:
forest = forests[-1].duplicate()
order = order-1
forest.setMeshOrder(order, interp)
else:
forest = forests[-1].coarsen()
forest.setMeshOrder(order, interp)
# Balance and repartition if needed
forest.balance(1)
if repartition:
forest.repartition()
# Create the forest object
creator, filtr = callback(forest)
forests.append(forest)
filters.append(filtr)
assemblers.append(creator.createTACS(forest, ordering))
    # Scale the coordinates by scale_coordinate_factor if it is != 1.0
if scale_coordinate_factor != 1.0:
for assembler in assemblers:
X = assembler.createNodeVec()
assembler.getNodes(X)
X.scale(scale_coordinate_factor)
assembler.setNodes(X)
# Create the multigrid object
mg = TMR.createMg(assemblers, forests, use_galerkin=use_galerkin)
# Create the TMRTopoFilter object
filter_obj = None
if callable(filter_type):
filter_obj = filter_type(assemblers, filters)
elif isinstance(filter_type, str):
if filter_type == 'lagrange':
filter_obj = TMR.LagrangeFilter(assemblers, filters)
elif filter_type == 'matrix':
filter_obj = TMR.MatrixFilter(r0, N, assemblers, filters)
elif filter_type == 'conform':
filter_obj = TMR.ConformFilter(assemblers, filters)
elif filter_type == 'helmholtz':
filter_obj = TMR.HelmholtzFilter(r0, assemblers, filters)
problem = TMR.TopoProblem(filter_obj, mg)
return problem
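# Example (sketch): building a two-level problem from a quad forest with a
# creator callback in the style of the example scripts above; every name
# below is a hypothetical placeholder, not a fixed API:
#
#   problem = createTopoProblem(forest, creator_callback, 'lagrange',
#                               nlevels=2, design_vars_per_node=1)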
def computeVertexLoad(name, forest, assembler, point_force):
"""
Add a load at vertices with the given name value. The assembler object must
be created from the forest. The point_force must be equal to the number of
variables per node in the assembler object.
Args:
name (str): Name of the surface where the traction will be added
forest (QuadForest or OctForest): Forest for the finite-element mesh
assembler (Assembler): TACSAssembler object for the finite-element problem
point_force (list): List of point forces to apply at the vertices
Returns:
Vec: A force vector containing the point load
"""
# Get the number of variable per node from the assembler
vars_per_node = assembler.getVarsPerNode()
if vars_per_node != len(point_force):
raise ValueError('Point force length must be equal to vars_per_node')
# Create the force vector and extract the array
force = assembler.createVec()
force_array = force.getArray()
# Retrieve the node numbers from the forest
nodes = forest.getNodesWithName(name)
comm = assembler.getMPIComm()
node_range = forest.getNodeRange()
# Add the point force into the force arrays
for node in nodes:
if ((node >= node_range[comm.rank]) and (node < node_range[comm.rank+1])):
index = node - node_range[comm.rank]
force_array[vars_per_node*index:vars_per_node*(index+1)] += point_force[:]
# Match the ordering of the vector
assembler.reorderVec(force)
return force
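# Example (sketch): applying a downward point load at all vertices named
# 'load_pt' on a problem with two displacement variables per node; the
# vertex name and load magnitude are hypothetical:
#
#   force = computeVertexLoad('load_pt', forest, assembler, [0.0, -100.0])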
def computeTractionLoad(names, forest, assembler, trac):
"""
Add a surface traction to all quadrants or octants that touch a face or edge with
the given name. The assembler must be created from the provided forest. The list
trac must have a traction for each face (6) for octants or each edge (4) for
quadrants.
Note: This code uses the fact that the getOctsWithName or getQuadsWithName returns
the local face or edge index touching the surface or edge in the info member.
Args:
names (str) or list[(str)]: Name or list of names of the surface(s) where the traction will be added
forest (QuadForest or OctForest): Forest for the finite-element mesh
assembler (Assembler): TACSAssembler object for the finite-element problem
trac (list): List of tractions, one for each possible face/edge orientation
Returns:
Vec: A force vector containing the traction
"""
if isinstance(forest, TMR.OctForest):
octants = forest.getOctants()
if isinstance(names, str):
face_octs = forest.getOctsWithName(names)
else:
face_octs = []
for name in names:
face_octs.extend(forest.getOctsWithName(name))
elif isinstance(forest, TMR.QuadForest):
octants = forest.getQuadrants()
if isinstance(names, str):
face_octs = forest.getQuadsWithName(names)
else:
face_octs = []
for name in names:
face_octs.extend(forest.getQuadsWithName(name))
# Create the force vector and zero the variables in the assembler
force = assembler.createVec()
assembler.zeroVariables()
assembler.zeroDotVariables()
assembler.zeroDDotVariables()
# Create the auxiliary element class
aux = TACS.AuxElements()
for i in range(len(face_octs)):
index = face_octs[i].tag
if index is not None:
aux.addElement(index, trac[face_octs[i].info])
# Keep auxiliary elements already set in the assembler
# aux_tmp = assembler.getAuxElements()
assembler.setAuxElements(aux)
# Compute the residual where force = -residual
assembler.assembleRes(force)
force.scale(-1.0)
# Reset the auxiliary elements
assembler.setAuxElements(None) # (aux_tmp)
return force
def compute3DTractionLoad(name, forest, assembler, tr):
"""
Add a constant surface traction to all octants that touch a face or edge with
the given name.
Args:
forest (QuadForest or OctForest): Forest for the finite-element mesh
name (str): Name of the surface where the traction will be added
assembler (Assembler): TACSAssembler object for the finite-element problem
tr (list): The 3D components of the traction.
Returns:
Vec: A force vector containing the traction
"""
# Get the basis
element = assembler.getElements()[0]
basis = element.getElementBasis()
# Get the number of variables per node
vars_per_node = assembler.getVarsPerNode()
trac = []
for findex in range(6):
trac.append(elements.Traction3D(vars_per_node, findex, basis, tr))
return computeTractionLoad(name, forest, assembler, trac)
def interpolateDesignVec(orig_filter, orig_vec, new_filter, new_vec):
"""
This function interpolates a design vector from the original design space defined
on an OctForest or QuadForest and interpolates it to a new OctForest or QuadForest.
This function is used after a mesh adaptation step to get the new design space.
Args:
orig_filter (OctForest or QuadForest): Original filter Oct or QuadForest object
orig_vec (PVec): Design variables on the original mesh in a ParOpt.PVec
new_filter (OctForest or QuadForest): New filter Oct or QuadForest object
        new_vec (PVec): Design variables on the new mesh in a ParOpt.PVec (set on output)
"""
# Convert the PVec class to TACSBVec
orig_x = TMR.convertPVecToVec(orig_vec)
if orig_x is None:
raise ValueError('Original vector must be generated by TMR.TopoProblem')
new_x = TMR.convertPVecToVec(new_vec)
if new_x is None:
raise ValueError('New vector must be generated by TMR.TopoProblem')
if orig_x.getVarsPerNode() != new_x.getVarsPerNode():
raise ValueError('Number of variables per node must be consistent')
orig_map = orig_x.getNodeMap()
new_map = new_x.getNodeMap()
vars_per_node = orig_x.getVarsPerNode()
# Create the interpolation class
interp = TACS.VecInterp(orig_map, new_map, vars_per_node)
new_filter.createInterpolation(orig_filter, interp)
interp.initialize()
# Perform the interpolation
interp.mult(orig_x, new_x)
return
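# Example (sketch): carrying a design forward across a mesh adaptation
# step; the problem and filter objects below are hypothetical placeholders:
#
#   x_new = new_problem.createDesignVec()
#   interpolateDesignVec(old_filter, x_old, new_filter, x_new)
#   new_problem.setInitDesignVars(x_new)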
def addNaturalFrequencyConstraint(problem, omega_min, **kwargs):
"""
Add a natural frequency constraint to a TopoProblem optimization problem
This function automatically sets good default arguments that can be
overridden with keyword arguments passed in through kwargs.
Args:
problem (TopoProblem): TopoProblem optimization problem
omega_min (float): Minimum natural frequency, Hz
**kwargs: Frequency constraint parameters; check
TMR documentation for more detail
"""
# Convert the provided minimum natural frequency from
# Hz to rad/s, square it, and make it negative to fit the
# constraint form: omega^2 - offset >= 0.0
offset = -(2.0*np.pi*omega_min)**2
# Define all the possible arguments and set defaults
opts = {'use_jd':True,
'num_eigs':10,
'ks_weight':50.0,
'offset':offset,
'sigma':-offset,
'scale':-0.75/offset,
'max_lanczos':100,
'tol':1e-30,
'eig_tol':5e-7,
'eig_rtol':1e-6,
'eig_atol':1e-12,
'num_recycle':10,
'fgmres_size':8,
'max_jd_size':50,
            'recycle_type':'num_recycling',
            # Assumed default; only consulted on the Lanczos code path below
            'track_eigen_iters':0}
# Apply the user defined parameters
for key, value in kwargs.items():
if key in opts:
opts[key] = value
else:
raise ValueError('%s is not a valid option'%(key))
if opts['use_jd']:
# Set the recycling strategy
if opts['recycle_type'] == 'num_recycling':
recycle_type = TACS.NUM_RECYCLE
else:
recycle_type = TACS.SUM_TWO
problem.addFrequencyConstraint(opts['sigma'], opts['num_eigs'],
opts['ks_weight'], opts['offset'],
opts['scale'], opts['max_jd_size'],
opts['eig_tol'], opts['use_jd'],
opts['fgmres_size'], opts['eig_rtol'],
opts['eig_atol'], opts['num_recycle'],
recycle_type)
else: # use the Lanczos method
problem.addFrequencyConstraint(opts['sigma'], opts['num_eigs'],
opts['ks_weight'], opts['offset'],
opts['scale'],
opts['max_lanczos'], opts['tol'], 0,
0, 0, 0, 0, TACS.SUM_TWO,
opts['track_eigen_iters'])
return
def densityBasedRefine(forest, assembler, index=0,
lower=0.05, upper=0.5, reverse=False,
min_lev=0, max_lev=TMR.MAX_LEVEL):
"""
Apply a density-based refinement criteria.
This function takes in a Quad or OctForest that has been used for analysis and its
corresponding Assembler object. It then uses the data set in the constitutive object
to extract the density within each element. If the density falls below the the bound
*lower* the element is coarsened, if the density exceeds *upper* the element is
refined. If *reverse* is set, this scheme is reversed so low design values are
refined. The refinement is applied directly to the forest.
Args:
forest (QuadForest or OctForest): OctForest or QuadForest to refine
assembler (Assembler): The TACS.Assembler object associated with forest
index (int): The component index of the design vector used to indicate material
lower (float): the lower limit used for coarsening
upper (float): the upper limit used for refinement
reverse (bool): Reverse the refinement scheme
min_lev (int): Minimum refinement level
max_lev (int): Maximum refinement level
"""
# Create refinement array
num_elems = assembler.getNumElements()
refine = np.zeros(num_elems, dtype=np.int32)
# Get the elements from the Assembler object
elems = assembler.getElements()
for i in range(num_elems):
# Extract the design variables from the element
dvs_per_node = elems[i].getDesignVarsPerNode()
dvs = elems[i].getDesignVars(i)
# Apply the refinement criteria
if reverse:
value = np.min(dvs[index::dvs_per_node])
if value >= upper:
refine[i] = -1
elif value <= lower:
refine[i] = 1
else:
value = np.max(dvs[index::dvs_per_node])
if value >= upper:
refine[i] = 1
elif value <= lower:
refine[i] = -1
# Refine the forest
forest.refine(refine, min_lev=min_lev, max_lev=max_lev)
return
def approxDistanceRefine(forest, fltr, assembler, refine_distance, index=0,
domain_length=1.0, tfactor=0.05, cutoff=0.15,
filename=None, min_lev=0, max_lev=TMR.MAX_LEVEL):
"""
    Apply a distance-based refinement criterion.
This function takes in a forest associated with the analysis, a filter associated
with the design variables and the corresponding assembler object. An approximate
distance function is computed using TMR which gives an approximation of the distance
to the closest point on the domain boundary. In this case, the domain boundary is
approximated as those points that are intermediate in [cutoff, 1-cutoff]. Since these
are applied to the filtered (not projected) states, there will be intermediate density
values. Finally, all elements that contain values that are within refine_distance to
    the approximate boundary are refined, while all other elements are coarsened.
Notes: The index controls which component of the design variable is used to estimate
the distance (useful for multimaterial cases). The tfactor controls the approximation,
larger values of tfactor lead to more diffusive approximations, but small values may
    lead to numerical issues. The actual factor value is determined based on the domain
length parameter which gives the characteristic length of the domain.
Args:
forest (QuadForest or OctForest): OctForest or QuadForest to refine
        fltr (QuadForest or OctForest): OctForest or QuadForest for the filter object
assembler (Assembler): The TACS.Assembler object associated with forest
refine_distance (float): Refine all elements within this distance
index (int): The design variable component index (!= 0 for multimaterial cases)
tfactor (float): Factor applied to the domain_length for computing the approx dist.
cutoff (float): Cutoff to indicate structural interface
min_lev (int): Minimum refinement level
max_lev (int): Maximum refinement level
"""
# Set up and solve for an approximate level set function
x = assembler.createDesignVec()
assembler.getDesignVars(x)
# Approximate the distance to the boundary
dist = TMR.ApproximateDistance(fltr, x, index=index, cutoff=cutoff,
t=tfactor*domain_length, filename=filename)
# Create refinement array
num_elems = assembler.getNumElements()
refine = np.zeros(num_elems, dtype=np.int32)
for i in range(num_elems):
# Apply the refinement criteria
if dist[i] <= refine_distance:
refine[i] = 1
else:
refine[i] = -1
# Refine the forest
forest.refine(refine, min_lev=min_lev, max_lev=max_lev)
return
def targetRefine(forest, fltr, assembler, refine_distance,
interface_lev=2, interior_lev=1,
interface_index=-1, interior_index=0, reverse=False,
domain_length=1.0, tfactor=0.05, cutoff=0.15,
filename=None, min_lev=0, max_lev=TMR.MAX_LEVEL):
"""
Apply a target-based refinement strategy.
    This function employs a targeted refinement strategy. The goal is to refine the
    interface elements, defined from an approximate distance calculation, and the
    interior elements, defined as those elements within a given threshold of the
    density field that are not close to the interface, to a prescribed level at
    the first iteration. All other elements are coarsened aggressively.
Note: The interface and interior can be computed using different indices in
multimaterial optimization. When the interface index is negative, all materials are
considered during the interface distance calculation.
Args:
forest (QuadForest or OctForest): OctForest or QuadForest to refine
        fltr (QuadForest or OctForest): OctForest or QuadForest for the filter object
assembler (Assembler): The TACS.Assembler object associated with forest
refine_distance (float): Refine all elements within this distance
interface_lev (int): Target interface refinement level
interior_lev (int): Target interior refinement level
interface_index (int): Design variable component index for the interface problem
interior_index (int): Design variable component index for the interior
reverse (boolean): Reverse the sense of the interior refinement
tfactor (float): Factor applied to the domain_length for computing the approx dist.
cutoff (float): Cutoff to indicate structural interface
filename (str): File name for the approximate distance calculation
min_lev (int): Minimum refinement level
max_lev (int): Maximum refinement level
"""
# Set up and solve for an approximate level set function
x = assembler.createDesignVec()
assembler.getDesignVars(x)
# Approximate the distance to the boundary
dist = TMR.ApproximateDistance(fltr, x, index=interface_index, cutoff=cutoff,
t=tfactor*domain_length, filename=filename)
# Create refinement array
num_elems = assembler.getNumElements()
refine = np.zeros(num_elems, dtype=np.int32)
# Compute the levels
if isinstance(forest, TMR.OctForest):
octants = forest.getOctants()
lev = np.zeros(len(octants))
for i, oc in enumerate(octants):
lev[i] = oc.level
elif isinstance(forest, TMR.QuadForest):
quads = forest.getQuadrants()
lev = np.zeros(len(quads))
for i, quad in enumerate(quads):
lev[i] = quad.level
# Get the elements from the Assembler object
elems = assembler.getElements()
for i in range(num_elems):
# Apply the refinement criteria
if dist[i] <= refine_distance:
refine[i] = interface_lev - lev[i]
else:
# Now check whether this is in the interior or exterior of
# the domain
dvs_per_node = elems[i].getDesignVarsPerNode()
dvs = elems[i].getDesignVars(i)
# Apply the refinement criteria
if reverse:
value = np.min(dvs[interior_index::dvs_per_node])
if value >= 1.0 - cutoff:
refine[i] = -1
elif value <= cutoff:
refine[i] = interior_lev - lev[i]
else:
value = np.max(dvs[interior_index::dvs_per_node])
if value >= 1.0 - cutoff:
refine[i] = interior_lev - lev[i]
elif value <= cutoff:
refine[i] = -1
# Refine the forest
forest.refine(refine, min_lev=min_lev, max_lev=max_lev)
return
class OptFilterWeights:
def __init__(self, diag, X, H):
"""
Compute an approximation of the coefficients of a Helmholtz filter.
Args:
diag (int): The index of the diagonal (base point) of the stencil
X (np.ndarray): An array of the node positions
H (np.ndarray): Symmetric matrix of second derivatives for the filter
"""
self.diag = diag
self.X = X
self.n = self.X.shape[0]
# Compute the normalization
if len(self.X.shape) == 1:
self.delta = np.max(np.absolute(self.X - self.X[self.diag]))
else:
self.delta = np.sqrt(np.max(
np.sum((self.X - self.X[self.diag,:])*(self.X - self.X[self.diag,:]), axis=1)))
self.dim = 3
if len(self.X.shape) == 1 or self.X.shape[1] == 1:
self.dim = 1
# Compute the constraint matrix
A = np.zeros((2, self.n-1))
# Populate the b vector
b = np.zeros(2)
b[1] = H[0,0]
index = 0
for i in range(self.n):
if i != self.diag:
dx = (self.X[i] - self.X[self.diag])/self.delta
A[0,index] = dx
A[1,index] = 0.5*dx**2
index += 1
elif self.X.shape[1] == 2:
self.dim = 2
# Compute the constraint matrix
A = np.zeros((5, self.n-1))
# Populate the b vector
b = np.zeros(5)
b[2] = H[0,0]
b[3] = H[1,1]
b[4] = 2.0*H[0,1]
index = 0
for i in range(self.n):
if i != self.diag:
dx = (self.X[i,0] - self.X[self.diag,0])/self.delta
dy = (self.X[i,1] - self.X[self.diag,1])/self.delta
A[0,index] = dx
A[1,index] = dy
A[2,index] = 0.5*dx**2
A[3,index] = 0.5*dy**2
A[4,index] = dx*dy
index += 1
else:
# Compute the constraint matrix
A = np.zeros((9, self.n-1))
# Populate the b vector
b = np.zeros(9)
b[3] = H[0,0]
b[4] = H[1,1]
b[5] = H[2,2]
b[6] = 2*H[1,2]
b[7] = 2*H[0,2]
b[8] = 2*H[0,1]
index = 0
for i in range(self.n):
if i != self.diag:
dx = (self.X[i,0] - self.X[self.diag,0])/self.delta
dy = (self.X[i,1] - self.X[self.diag,1])/self.delta
dz = (self.X[i,2] - self.X[self.diag,2])/self.delta
A[0,index] = dx
A[1,index] = dy
A[2,index] = dz
A[3,index] = 0.5*dx**2
A[4,index] = 0.5*dy**2
A[5,index] = 0.5*dz**2
A[6,index] = dy*dz
A[7,index] = dx*dz
A[8,index] = dx*dy
index += 1
self.b = b
self.A = A
return
def obj_func(self, w):
"""Evaluate the sum square of the weights"""
return 0.5*np.sum(w**2)
def obj_func_der(self, w):
"""Evaluate the derivative of the sum square of weights"""
return w
def con_func(self, w):
"""Compute the interpolation constraints"""
return np.dot(self.A, w) - self.b
def con_func_der(self, w):
"""Compute the derivative of the interpolation ocnstraints"""
return self.A
def set_alphas(self, w, alpha):
"""Compute the interpolating coefficients based on the weights"""
alpha[:] = 0.0
index = 0
for i in range(self.n):
if i != self.diag:
alpha[i] = w[index]/self.delta**2
alpha[self.diag] += w[index]/self.delta**2
index += 1
alpha[self.diag] += 1.0
return
class Mfilter(TMR.HelmholtzPUFilter):
def __init__(self, N, assemblers, filters, vars_per_node=1,
dim=2, r=0.01):
"""
Create an M-filter: A type of Helmholtz partition of unity filter that
approximates the Helmholtz PDE-based filter and maintains positive
coefficients over a range of meshes.
Args:
N (int): Number of terms in the approximate Neumann inverse
assemblers (list): List of TACS.Assembler objects
filters (list): List of TMR.QuadForest or TMR.OctForest objects
vars_per_node (int): Number of design variables at each node
dim (int): Spatial dimension of the problem
r (float): Filter radius
Note: You must call initialize() on the filter before use.
"""
self.r = r
self.dim = dim
return
def getInteriorStencil(self, diag, X, alpha):
"""Get the weights for an interior stencil point"""
H = self.r**2*np.eye(3)
# Reshape the values in the matrix
X = X.reshape((-1, 3))
n = X.shape[0]
if self.dim == 2:
X = X[:,:2]
# Set up the optimization problem
opt = OptFilterWeights(diag, X, H)
# Set the bounds and initial point
w0 = np.ones(n-1)
bounds = []
for i in range(n-1):
bounds.append((0, None))
res = minimize(opt.obj_func, w0, jac=opt.obj_func_der,
method='SLSQP', bounds=bounds,
constraints={'type': 'eq', 'fun': opt.con_func,
'jac': opt.con_func_der})
# Set the optimized alpha values
opt.set_alphas(res.x, alpha)
return
def getBoundaryStencil(self, diag, normal, X, alpha):
"""Get a sentcil point on the domain boundary"""
H = self.r**2*np.eye(2)
# Reshape the values in the matrix
X = X.reshape((-1, 3))
n = X.shape[0]
if self.dim == 2:
X = X[:,:2]
t = np.array([normal[1], -normal[0]])
Xt = np.dot(X - X[diag,:], t)
elif self.dim == 3:
# Reduce the problem to a 2d problem on linearization of the
# the domain boundary. First, compute an arbitrary direction
# that is not aligned along the normal direction
index = np.argmin(np.absolute(normal))
t = np.zeros(3)
t[index] = 1.0
# Compute the in-plane directions (orthogonal to the normal direction)
t2 = np.cross(t, normal)
t1 = np.cross(normal, t2)
# Reduce the problem on the boundary
Xt = np.zeros((n, 2))
Xt[:,0] = np.dot(X - X[diag,:], t1)
Xt[:,1] = np.dot(X - X[diag,:], t2)
# Set up the optimization problem
opt = OptFilterWeights(diag, Xt, H)
# Set the bounds and initial point
w0 = np.ones(n-1)
bounds = []
for i in range(n-1):
bounds.append((0, None))
res = minimize(opt.obj_func, w0, jac=opt.obj_func_der,
method='SLSQP', bounds=bounds,
constraints={'type': 'eq', 'fun': opt.con_func,
'jac': opt.con_func_der})
# Set the optimized alpha values
opt.set_alphas(res.x, alpha)
return
def setSurfaceBounds(problem, comm, forest, names,
face_lb=0.99, face_ub=1.0,
constrain_octs=True):
"""
Set upper and lower bounds on specific faces to
"require" material on certain boundaries
Args:
problem: TopoProblem object
comm: MPI communicator object
forest: TMROct(or Quad)Forest object
names (list): list of surface names where these
bounds should be applied
face_lb: lower bound value to apply
face_ub: upper bound value to apply
constrain_octs (bool): if True, constrain
the octants/quadrants on the surface;
If False, only constrain the boundary
nodes.
"""
assembler = problem.getAssembler()
x_vec = assembler.createDesignVec()
assembler.getDesignVars(x_vec)
x = x_vec.getArray()
dv = problem.createDesignVec()
lb = problem.createDesignVec()
ub = problem.createDesignVec()
dv[:] = x[:]
lb[:] = 1e-3
ub[:] = 1.0
face_dv = 0.5*(face_lb + face_ub)
for name in names:
mpi_rank = comm.Get_rank()
node_range = forest.getNodeRange()
if constrain_octs:
if isinstance(forest, TMR.OctForest):
octs = forest.getOctsWithName(name)
else:
octs = forest.getQuadsWithName(name)
conn = forest.getMeshConn()
node_octs = np.array([])
for oc in octs:
node_octs = np.append(node_octs,
conn[oc.tag, :])
else:
node_octs = forest.getNodesWithName(name)
node_octs = node_octs.astype(int)
for i in range(len(node_octs)):
if (node_octs[i] >= node_range[mpi_rank]) and \
(node_octs[i] < node_range[mpi_rank+1]):
index = int(node_octs[i] - node_range[mpi_rank])
dv[index] = face_dv
lb[index] = face_lb
ub[index] = face_ub
problem.setInitDesignVars(dv, lbvec=lb,
ubvec=ub)
return
``` |
{
"source": "12lol12lol12lol/user-service",
"score": 3
} |
#### File: routers/user/routes.py
```python
from db import db
from fastapi import APIRouter, HTTPException, status
from fastapi.param_functions import Depends
from fastapi.security import OAuth2PasswordRequestForm
from models import Token, User, UserModel, UserSignUpModel
from repository.user import UserRepo
from services import (SignUpUserService, UserServiceException, auth_user,
check_user_token, create_access_token)
from settings import settings
user_router = APIRouter()
@user_router.post('/auth/sign_up', response_model=UserModel, response_model_exclude={'password'})
async def sign_up(user: UserSignUpModel):
user_sign_up_service = SignUpUserService(user=user)
try:
res = await user_sign_up_service.run()
except UserServiceException as ex:
raise HTTPException(status_code=404, detail=ex.get_message()) from ex
return res
@user_router.post('/auth/token', response_model=Token)
async def token(form_data: OAuth2PasswordRequestForm = Depends()):
user = await auth_user(UserRepo, form_data.username, form_data.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token = create_access_token(data={'sub': user.username}, expires_delta=settings.token_expired)
return {
'access_token': access_token,
'token_type': 'bearer'
}
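# Example request (hypothetical host and credentials), exchanging form data for a bearer token:
#   curl -X POST http://localhost:8000/auth/token -d "username=alice&password=secret"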
@user_router.get("/users/me/", response_model=User)
async def read_users_me(current_user: User = Depends(check_user_token)):
return current_user
``` |
{
"source": "12-malak/Pose-Estimation",
"score": 2
} |
#### File: alphapose/models/hardnet.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import collections
import numpy as np
import torch
from torch import nn
import torch.nn.functional as F
import torch.nn.init as init
from .builder import SPPE
from .layers.Resnet import ResNet
from .layers.SE_Resnet import SEResnet
from .layers.ShuffleResnet import ShuffleResnet
BN_MOMENTUM = 0.1
DEBUG = False
def conv3x3(in_planes, out_planes, stride=1):
"3x3 convolution with padding"
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class Identity(nn.Module):
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
return x
class Flatten(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
return x.view(x.data.size(0),-1)
class CombConvLayer(nn.Sequential):
def __init__(self, in_channels, out_channels, norm_layer, kernel=1, stride=1, dropout=0.1, bias=False):
super().__init__()
        self.add_module('layer1', ConvLayer(in_channels, out_channels, norm_layer, kernel=kernel))
self.add_module('layer2',DWConvLayer(out_channels, out_channels, norm_layer, stride=stride))
def forward(self, x):
return super().forward(x)
class DWConvLayer(nn.Sequential):
def __init__(self, in_channels, out_channels, norm_layer, stride=1, bias=False):
super().__init__()
out_ch = out_channels
groups = in_channels
kernel = 3
if DEBUG:
print(kernel, 'x', kernel, 'x', out_channels, 'x', out_channels, 'DepthWise')
self.add_module('dwconv', nn.Conv2d(groups, groups, kernel_size=3,
stride=stride, padding=1, groups=groups, bias=bias))
self.add_module('norm', norm_layer(groups, momentum=BN_MOMENTUM))
def forward(self, x):
return super().forward(x)
class ConvLayer(nn.Sequential):
def __init__(self, in_channels, out_channels, norm_layer, kernel=3, stride=1, padding=0, bias=False):
super().__init__()
self.out_channels = out_channels
out_ch = out_channels
groups = 1
if DEBUG:
print(kernel, 'x', kernel, 'x', in_channels, 'x', out_channels)
pad = kernel//2 if padding == 0 else padding
self.add_module('conv', nn.Conv2d(in_channels, out_ch, kernel_size=kernel,
stride=stride, padding=pad, groups=groups, bias=bias))
self.add_module('norm', norm_layer(out_ch, momentum=BN_MOMENTUM))
self.add_module('relu', nn.ReLU(True))
def forward(self, x):
return super().forward(x)
class BRLayer(nn.Sequential):
def __init__(self, in_channels, norm_layer):
super().__init__()
self.add_module('norm', norm_layer(in_channels))
self.add_module('relu', nn.ReLU(True))
def forward(self, x):
return super().forward(x)
class HarDBlock(nn.Module):
def get_link(self, layer, base_ch, growth_rate, grmul):
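        # HarDNet connectivity: layer L links back to layer L - 2**i for every
        # power of two dividing L (e.g. layer 4 links to layers 3, 2 and 0), and
        # the layer's channel count is multiplied by grmul for each extra link.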
if layer == 0:
return base_ch, 0, []
out_channels = growth_rate
link = []
for i in range(10):
dv = 2 ** i
if layer % dv == 0:
k = layer - dv
link.append(k)
if i > 0:
out_channels *= grmul
out_channels = int(int(out_channels + 1) / 2) * 2
in_channels = 0
for i in link:
ch,_,_ = self.get_link(i, base_ch, growth_rate, grmul)
in_channels += ch
return out_channels, in_channels, link
def get_out_ch(self):
return self.out_channels
def __init__(self, in_channels, growth_rate, grmul, n_layers, norm_layer, keepBase=False, residual_out=False, dwconv=False):
super().__init__()
self.in_channels = in_channels
self.growth_rate = growth_rate
self.grmul = grmul
self.n_layers = n_layers
self.norm_layer = norm_layer
self.keepBase = keepBase
self.links = []
layers_ = []
self.out_channels = 0
for i in range(n_layers):
outch, inch, link = self.get_link(i+1, in_channels, growth_rate, grmul)
self.links.append(link)
use_relu = residual_out
if dwconv:
layers_.append(CombConvLayer(inch, outch, norm_layer))
else:
layers_.append(ConvLayer(inch, outch, norm_layer))
if (i % 2 == 0) or (i == n_layers - 1):
self.out_channels += outch
if DEBUG:
print("Blk out =",self.out_channels)
self.layers = nn.ModuleList(layers_)
def forward(self, x):
layers_ = [x]
for layer in range(len(self.layers)):
link = self.links[layer]
tin = []
for i in link:
tin.append(layers_[i])
if len(tin) > 1:
x = torch.cat(tin, 1)
else:
x = tin[0]
out = self.layers[layer](x)
layers_.append(out)
t = len(layers_)
out_ = []
for i in range(t):
if (i == 0 and self.keepBase) or \
(i == t-1) or (i%2 == 1):
out_.append(layers_[i])
out = torch.cat(out_, 1)
return out
class HarDBlock_v2(nn.Module):
def get_link(self, layer, base_ch, growth_rate, grmul):
if layer == 0:
return base_ch, 0, []
out_channels = growth_rate
link = []
for i in range(10):
dv = 2 ** i
if layer % dv == 0:
k = layer - dv
link.insert(0, k)
if i > 0:
out_channels *= grmul
out_channels = int(int(out_channels + 1) / 2) * 2
in_channels = 0
for i in link:
ch,_,_ = self.get_link(i, base_ch, growth_rate, grmul)
in_channels += ch
return out_channels, in_channels, link
def get_out_ch(self):
return self.out_channels
def __init__(self, in_channels, growth_rate, grmul, n_layers, norm_layer, dwconv=False):
super().__init__()
self.links = []
conv_layers_ = []
bnrelu_layers_ = []
self.layer_bias = []
self.out_channels = 0
self.norm_layer = norm_layer
self.out_partition = collections.defaultdict(list)
for i in range(n_layers):
outch, inch, link = self.get_link(i+1, in_channels, growth_rate, grmul)
self.links.append(link)
for j in link:
self.out_partition[j].append(outch)
cur_ch = in_channels
for i in range(n_layers):
accum_out_ch = sum( self.out_partition[i] )
real_out_ch = self.out_partition[i][0]
conv_layers_.append( nn.Conv2d(cur_ch, accum_out_ch, kernel_size=3, stride=1, padding=1, bias=True) )
bnrelu_layers_.append( BRLayer(real_out_ch, norm_layer) )
cur_ch = real_out_ch
if (i % 2 == 0) or (i == n_layers - 1):
self.out_channels += real_out_ch
self.conv_layers = nn.ModuleList(conv_layers_)
self.bnrelu_layers = nn.ModuleList(bnrelu_layers_)
def transform(self, blk, trt=False):
# Transform weight matrix from a pretrained HarDBlock v1
in_ch = blk.layers[0][0].weight.shape[1]
for i in range(len(self.conv_layers)):
link = self.links[i].copy()
link_ch = [blk.layers[k-1][0].weight.shape[0] if k > 0 else
blk.layers[0 ][0].weight.shape[1] for k in link]
part = self.out_partition[i]
w_src = blk.layers[i][0].weight
b_src = blk.layers[i][0].bias
self.conv_layers[i].weight[0:part[0], :, :,:] = w_src[:, 0:in_ch, :,:]
self.layer_bias.append(b_src)
#if b_src is not None:
# self.layer_bias[i] = b_src.view(1,-1,1,1)
if b_src is not None:
if trt:
self.conv_layers[i].bias[1:part[0]] = b_src[1:]
self.conv_layers[i].bias[0] = b_src[0]
self.conv_layers[i].bias[part[0]:] = 0
self.layer_bias[i] = None
else:
#for pytorch, add bias with standalone tensor is more efficient than within conv.bias
#this is because the amount of non-zero bias is small,
#but if we use conv.bias, the number of bias will be much larger
self.conv_layers[i].bias = None
else:
self.conv_layers[i].bias = None
in_ch = part[0]
link_ch.reverse()
link.reverse()
if len(link) > 1:
for j in range(1, len(link) ):
ly = link[j]
part_id = self.out_partition[ly].index(part[0])
chos = sum( self.out_partition[ly][0:part_id] )
choe = chos + part[0]
chis = sum( link_ch[0:j] )
chie = chis + link_ch[j]
self.conv_layers[ly].weight[chos:choe, :,:,:] = w_src[:, chis:chie,:,:]
#update BatchNorm or remove it if there is no BatchNorm in the v1 block
self.bnrelu_layers[i] = None
if isinstance(blk.layers[i][1], self.norm_layer):
self.bnrelu_layers[i] = nn.Sequential(
blk.layers[i][1],
blk.layers[i][2])
else:
self.bnrelu_layers[i] = blk.layers[i][1]
def forward(self, x):
layers_ = []
outs_ = []
xin = x
for i in range(len(self.conv_layers)):
link = self.links[i]
part = self.out_partition[i]
xout = self.conv_layers[i](xin)
layers_.append(xout)
xin = xout[:,0:part[0],:,:] if len(part) > 1 else xout
if self.layer_bias[i] is not None:
xin += self.layer_bias[i].view(1,-1,1,1)
if len(link) > 1:
for j in range( len(link) - 1 ):
ly = link[j]
part_id = self.out_partition[ly].index(part[0])
chs = sum( self.out_partition[ly][0:part_id] )
che = chs + part[0]
xin += layers_[ly][:,chs:che,:,:]
xin = self.bnrelu_layers[i](xin)
if i%2 == 0 or i == len(self.conv_layers)-1:
outs_.append(xin)
out = torch.cat(outs_, 1)
return out
class HarDNetBase(nn.Module):
def __init__(self, arch, norm_layer, depth_wise=False):
super().__init__()
if arch == 85:
first_ch = [48, 96]
second_kernel = 3
ch_list = [ 192, 256, 320, 480, 720]
grmul = 1.7
gr = [ 24, 24, 28, 36, 48]
n_layers = [ 8, 16, 16, 16, 16]
elif arch == 68:
first_ch = [32, 64]
second_kernel = 3
ch_list = [ 128, 256, 320, 640]
grmul = 1.7
gr = [ 14, 16, 20, 40]
n_layers = [ 8, 16, 16, 16]
else:
print("Error: HarDNet",arch," has no implementation.")
exit()
blks = len(n_layers)
self.base = nn.ModuleList([])
# First Layer: Standard Conv3x3, Stride=2
self.base.append (
ConvLayer(in_channels=3, out_channels=first_ch[0], norm_layer=norm_layer, kernel=3,
stride=2, bias=False) )
# Second Layer
self.base.append ( ConvLayer(first_ch[0], first_ch[1], norm_layer, kernel=second_kernel) )
# Maxpooling or DWConv3x3 downsampling
self.base.append(nn.AvgPool2d(kernel_size=3, stride=2, padding=1))
# Build all HarDNet blocks
ch = first_ch[1]
for i in range(blks):
blk = HarDBlock(ch, gr[i], grmul, n_layers[i], norm_layer, dwconv=depth_wise)
ch = blk.get_out_ch()
self.base.append ( blk )
if i != blks-1:
self.base.append ( ConvLayer(ch, ch_list[i], norm_layer, kernel=1) )
ch = ch_list[i]
if i== 0:
self.base.append(nn.AvgPool2d(kernel_size=2, stride=2, ceil_mode=True))
elif i != blks-1 and i != 1 and i != 3:
self.base.append(nn.AvgPool2d(kernel_size=2, stride=2))
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
if m.weight is not None:
init.kaiming_uniform_(m.weight, nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def weights_init(m):
for key in m.state_dict():
if key.split('.')[-1] == 'weight':
if 'conv' in key:
init.kaiming_uniform_(m.state_dict()[key], nonlinearity='relu')
if 'bn' in key:
m.state_dict()[key][...] = 1
elif key.split('.')[-1] == 'bias':
m.state_dict()[key][...] = 0
class TransitionUp(nn.Module):
def __init__(self, in_channels, out_channels):
super().__init__()
def forward(self, x, skip, concat=True):
out = F.interpolate(
x,
size=(skip.size(2), skip.size(3)),
mode="bilinear",
align_corners=True)
if concat:
out = torch.cat([out, skip], 1)
return out
@SPPE.register_module
class HarDNetPose(nn.Module):
def __init__(self, norm_layer=nn.BatchNorm2d, **cfg):
super(HarDNetPose, self).__init__()
assert cfg['DOWN_RATIO'] in [2, 4, 8, 16]
self.norm_layer = norm_layer
self._preset_cfg = cfg['PRESET']
self.first_level = int(np.log2(cfg['DOWN_RATIO']))-1
self.trt = cfg['TRT']
self.base = HarDNetBase(cfg['NUM_LAYERS'], norm_layer).base
self.last_pool = nn.AvgPool2d(kernel_size=2, stride=2)
if cfg['NUM_LAYERS'] == 85:
self.last_proj = ConvLayer(784, 256, norm_layer, kernel=1)
self.last_blk = HarDBlock(768, 80, 1.7, 8, norm_layer)
self.skip_nodes = [1,3,8,13]
self.SC = [32, 32, 0]
gr = [64, 48, 28]
layers = [8, 8, 4]
ch_list2 = [224 + self.SC[0], 160 + self.SC[1], 96 + self.SC[2]]
channels = [96, 214, 458, 784]
self.skip_lv = 3
scales = [2 ** i for i in range(len(channels[self.first_level:]))]
elif cfg['NUM_LAYERS'] == 68:
self.last_proj = ConvLayer(654, 192, norm_layer, kernel=1)
self.last_blk = HarDBlock(576, 72, 1.7, 8, norm_layer)
self.skip_nodes = [1,3,8,11]
self.SC = [32, 32, 0 ]
gr = [48, 32, 20]
layers = [8, 8, 4]
ch_list2 = [224+self.SC[0], 96+self.SC[1], 64+self.SC[2]]
channels = [64, 124, 328, 654]
self.skip_lv = 2
scales = [2 ** i for i in range(len(channels[self.first_level:]))]
self.transUpBlocks = nn.ModuleList([])
self.denseBlocksUp = nn.ModuleList([])
self.conv1x1_up = nn.ModuleList([])
self.avg9x9 = nn.AvgPool2d(kernel_size=(9,9), stride=1, padding=(4,4))
prev_ch = self.last_blk.get_out_ch()
for i in range(3):
skip_ch = channels[3-i]
self.transUpBlocks.append(TransitionUp(prev_ch, prev_ch))
if i < self.skip_lv:
cur_ch = prev_ch + skip_ch
else:
cur_ch = prev_ch
self.conv1x1_up.append(ConvLayer(cur_ch, ch_list2[i], norm_layer, kernel=1))
cur_ch = ch_list2[i]
cur_ch -= self.SC[i]
cur_ch *= 3
blk = HarDBlock(cur_ch, gr[i], 1.7, layers[i], norm_layer)
self.denseBlocksUp.append(blk)
prev_ch = blk.get_out_ch()
prev_ch += self.SC[0] + self.SC[1] + self.SC[2]
weights_init(self.denseBlocksUp)
weights_init(self.conv1x1_up)
weights_init(self.last_blk)
weights_init(self.last_proj)
out_channel = self._preset_cfg['NUM_JOINTS']
ch = max(128, out_channel*4)
self.conv_out = nn.Sequential(
nn.Conv2d(prev_ch, ch,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(ch, out_channel,
kernel_size=cfg['FINAL_CONV_KERNEL'], stride=1,
padding=cfg['FINAL_CONV_KERNEL'] // 2, bias=True))
fill_fc_weights(self.conv_out)
self.conv_out[-1].bias.data.fill_(-2.19)
def v2_transform(self):
print('Transform HarDBlock v2..')
for i in range( len(self.base)):
if isinstance(self.base[i], HarDBlock):
blk = self.base[i]
self.base[i] = HarDBlock_v2(blk.in_channels, blk.growth_rate, blk.grmul, blk.n_layers, blk.norm_layer)
self.base[i].transform(blk, self.trt)
blk = self.last_blk
self.last_blk = HarDBlock_v2(blk.in_channels, blk.growth_rate, blk.grmul, blk.n_layers, blk.norm_layer)
self.last_blk.transform(blk, self.trt)
for i in range(3):
blk = self.denseBlocksUp[i]
self.denseBlocksUp[i] = HarDBlock_v2(blk.in_channels, blk.growth_rate, blk.grmul, blk.n_layers, blk.norm_layer)
self.denseBlocksUp[i].transform(blk, self.trt)
def forward(self, x):
xs = []
x_sc = []
for i in range(len(self.base)):
x = self.base[i](x)
if i in self.skip_nodes:
xs.append(x)
x = self.last_proj(x)
x = self.last_pool(x)
x2 = self.avg9x9(x)
x3 = x/(x.sum((2,3),keepdim=True) + 0.1)
x = torch.cat([x,x2,x3],1)
x = self.last_blk(x)
for i in range(3):
skip_x = xs[3-i]
x = self.transUpBlocks[i](x, skip_x, (i<self.skip_lv))
x = self.conv1x1_up[i](x)
if self.SC[i] > 0:
end = x.shape[1]
x_sc.append( x[:,end-self.SC[i]:,:,:].contiguous() )
x = x[:,:end-self.SC[i],:,:].contiguous()
x2 = self.avg9x9(x)
x3 = x/(x.sum((2,3),keepdim=True) + 0.1)
x = torch.cat([x,x2,x3],1)
x = self.denseBlocksUp[i](x)
scs = [x]
for i in range(3):
if self.SC[i] > 0:
scs.insert(0, F.interpolate(
x_sc[i], size=(x.size(2), x.size(3)),
mode="bilinear", align_corners=True) )
x = torch.cat(scs,1)
x = self.conv_out(x)
return x
def _initialize(self, pretrained=''):
for m in self.modules():
if isinstance(m, nn.Conv2d):
# nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
nn.init.normal_(m.weight, std=0.001)
for name, _ in m.named_parameters():
if name in ['bias']:
nn.init.constant_(m.bias, 0)
elif isinstance(m, self.norm_layer):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.ConvTranspose2d):
nn.init.normal_(m.weight, std=0.001)
for name, _ in m.named_parameters():
if name in ['bias']:
nn.init.constant_(m.bias, 0)
if os.path.isfile(pretrained):
pretrained_state_dict = torch.load(pretrained)
need_init_state_dict = {}
for name, m in pretrained_state_dict.items():
if name.split('.')[0] in self.pretrained_layers \
or self.pretrained_layers[0] == '*':
need_init_state_dict[name] = m
self.load_state_dict(need_init_state_dict, strict=False)
elif pretrained:
        raise ValueError('{} does not exist!'.format(pretrained))
def get_pose_net(cfg, is_train, **kwargs):
    # cfg supplies the keyword arguments; passing it positionally would bind it to norm_layer
    model = HarDNetPose(**cfg, **kwargs)
if is_train and cfg.MODEL.INIT_WEIGHTS:
model._initialize(cfg.MODEL.INIT_WEIGHTS)
total_params = sum(p.numel() for p in model.parameters())
print( "Parameters=", total_params )
return model
``` |
{
"source": "12mashok/neat-python",
"score": 3
} |
#### File: neat/ctrnn/__init__.py
```python
from __future__ import division
from neat.graphs import required_for_output
class CTRNNNodeEval(object):
def __init__(self, time_constant, activation, aggregation, bias, response, links):
self.time_constant = time_constant
self.activation = activation
self.aggregation = aggregation
self.bias = bias
self.response = response
self.links = links
class CTRNN(object):
"""Sets up the ctrnn network itself."""
def __init__(self, inputs, outputs, node_evals):
self.input_nodes = inputs
self.output_nodes = outputs
self.node_evals = node_evals
self.values = [{}, {}]
for v in self.values:
for k in inputs + outputs:
v[k] = 0.0
for node, ne in self.node_evals.items():
v[node] = 0.0
for i, w in ne.links:
v[i] = 0.0
self.active = 0
self.time_seconds = 0.0
self.network_type = "ctrnn"
def reset(self):
self.values = [dict((k, 0.0) for k in v) for v in self.values]
self.active = 0
self.time_seconds = 0.0
def set_node_value(self, node_key, value):
for v in self.values:
v[node_key] = value
def get_max_time_step(self): # pragma: no cover
# TODO: Compute max time step that is known to be numerically stable for
# the current network configuration.
# pylint: disable=no-self-use
raise NotImplementedError()
def advance(self, inputs, advance_time, time_step=None):
"""
Advance the simulation by the given amount of time, assuming that inputs are
constant at the given values during the simulated time.
"""
final_time_seconds = self.time_seconds + advance_time
# Use half of the max allowed time step if none is given.
if time_step is None: # pragma: no cover
time_step = 0.5 * self.get_max_time_step()
if len(self.input_nodes) != len(inputs):
raise RuntimeError("Expected {0} inputs, got {1}".format(len(self.input_nodes), len(inputs)))
while self.time_seconds < final_time_seconds:
dt = min(time_step, final_time_seconds - self.time_seconds)
ivalues = self.values[self.active]
ovalues = self.values[1 - self.active]
self.active = 1 - self.active
for i, v in zip(self.input_nodes, inputs):
ivalues[i] = v
ovalues[i] = v
for node_key, ne in self.node_evals.items():
node_inputs = [ivalues[i] * w for i, w in ne.links]
s = ne.aggregation(node_inputs)
z = ne.activation(ne.bias + ne.response * s)
ovalues[node_key] += dt / ne.time_constant * (-ovalues[node_key] + z)
self.time_seconds += dt
ovalues = self.values[1 - self.active]
return [ovalues[i] for i in self.output_nodes]
@staticmethod
def create(genome, config, time_constant):
""" Receives a genome and returns its phenotype (a CTRNN). """
genome_config = config.genome_config
required = required_for_output(genome_config.input_keys, genome_config.output_keys, genome.connections)
# Gather inputs and expressed connections.
node_inputs = {}
for cg in genome.connections.values():
if not cg.enabled:
continue
i, o = cg.key
if o not in required and i not in required:
continue
if o not in node_inputs:
node_inputs[o] = [(i, cg.weight)]
else:
node_inputs[o].append((i, cg.weight))
node_evals = {}
for node_key, inputs in node_inputs.items():
node = genome.nodes[node_key]
activation_function = genome_config.activation_defs.get(node.activation)
aggregation_function = genome_config.aggregation_function_defs.get(node.aggregation)
node_evals[node_key] = CTRNNNodeEval(time_constant,
activation_function,
aggregation_function,
node.bias,
node.response,
inputs)
return CTRNN(genome_config.input_keys, genome_config.output_keys, node_evals)
```
#### File: neat-python/neat/noveltysearch.py
```python
import numpy as np
from sklearn.neighbors import NearestNeighbors
from neat.config import ConfigParameter, write_pretty_params
import matplotlib.pyplot as plt
class DefaultNoveltySearchConfig(object):
"""
Sets up and holds configuration information for the DefaultNoveltySearch class.
"""
def __init__(self, params = {}):
self._params = [ConfigParameter('novelty_search_enabled', bool, 'false'),
ConfigParameter('pop_knn_neighbours', int, '0'),
ConfigParameter('archive_knn_neighbours', int, '0'),
ConfigParameter('threshhold', float, '0'),
                        ConfigParameter('behavior_exclusion_type', list, 'None'),
                        ConfigParameter('behavior_exclusion_value', list, 'None')]
self.novelty_search_enabled = params.get('novelty_search_enabled', False)
self.pop_knn_neighbours = params.get('pop_knn_neighbours', None)
self.archive_knn_neighbours = params.get('archive_knn_neighbours', None)
self.threshhold = params.get('threshhold', None)
self.behavior_exclusion_type = params.get('behavior_exclusion_type', None)
self.behavior_exclusion_value = params.get('behavior_exclusion_value', None)
def __str__(self):
return "Default Novelty Search Configuration. Parameters include: {param}".format(param = self._params)
class DefaultNoveltySearch():
@classmethod
def parse_config(cls, param_dict):
novelty_config = DefaultNoveltySearchConfig(param_dict)
return novelty_config
def __init__(self):
"""
Initialises list of novelty members.
"""
self.novelMembers = {}
def calculateNovelty(self, genomes, config, iteration):
"""
Carries out two steps:
Step 1: Calculate how novel behavior is by euclidean distance/KNN.
Step 2: If individuals novelty is high, add to novelMembers list.
"""
# Extract novelty search configuration
configuration = config.novelty_search_config
# Extract behaviors of all genomes in the population
behaviors = {}
for genome_id, genome in genomes:
behaviors[genome_id] = genome.behavior
# Get all behavior values in current population and create a KNN model
behavior_values = np.array(list(behaviors.values()))
# If neighbours value is out of bounds, set it to max possible value
if int(configuration.pop_knn_neighbours) > len(behavior_values):
pop_knn_neighbours = len(behavior_values)
else:
pop_knn_neighbours = int(configuration.pop_knn_neighbours)
# FIRST KNN MODEL: fit on the behavior values of current population
# Obtain normalized data
normalized_behavior_values = self.normalizeData(behavior_values)
knn_model = NearestNeighbors(n_neighbors=pop_knn_neighbours, algorithm='ball_tree').fit(normalized_behavior_values)
if len(list(self.novelMembers.keys())) < 1:
models = [knn_model]
else:
# Get behaviors of novel member archive and create KNN model
novel_members = list(self.novelMembers.values())
novel_members_behaviors = np.array([member[0][1].behavior for member in novel_members])
# If neighbours value is out of bounds, set it to max possible value
if int(configuration.archive_knn_neighbours) > len(novel_members_behaviors):
archive_knn_neighbours = len(novel_members_behaviors)
else:
archive_knn_neighbours = int(configuration.archive_knn_neighbours)
# SECOND KNN MODEL:: Build knn model for novel member archive
#Obtain normalized data
normalized_novel_members_behaviors = self.normalizeData(novel_members_behaviors)
novel_members_knn_model = NearestNeighbors(n_neighbors=archive_knn_neighbours, algorithm='ball_tree').fit(normalized_novel_members_behaviors)
# Gather models
models = [knn_model, novel_members_knn_model]
# Novelty is assigned as the average distance to the k-nearest neighbors
# If genome is novel, average distance will be high.
self.calculatePopulationFitness(behaviors, genomes, models)
# Extract fitnesses of all genomes in the population after they have been calculated in previous step
fitnesses = {}
for genome_id, genome in genomes:
# store genome id as value for easy access
fitnesses[genome.fitness] = genome_id
# Get best genome, it's fitness, and behavior value
best_fitness = max(list(fitnesses.keys()))
best_fitness_genome_id = fitnesses[best_fitness]
best_behavior = behaviors[best_fitness_genome_id]
best_genome = [genome for genome in genomes if genome[0] == best_fitness_genome_id]
        # If the novel member archive is empty, add the best genome
if len(list(self.novelMembers.keys())) < 1:
self.novelMembers[iteration] = best_genome
# If knn average of best genome is greater than threshold, add to novel member archive
else:
# If distance of best genome is greater than threshold, add to novel member archive
knn_distance = self.KNN_distance(best_behavior, novel_members_knn_model)
print('knn threshhold: ', configuration.threshhold, type(configuration.threshhold))
if knn_distance > float(configuration.threshhold):
self.novelMembers[iteration] = best_genome
# Return novel member archive and behavior for entire population
return self.novelMembers, behavior_values
def calculatePopulationFitness(self, behaviors, genomes, knn_models):
"""
Sets genome.fitness to average distance of n nearest neighbours
"""
# Get normalization factors
behavior_values = list(behaviors.values())
behavior_min = np.amin(behavior_values)
behavior_max = np.amax(behavior_values)
# For each genome
for genome_id, genome in genomes:
fitness = 0
# for each knn model provided
for knn_model in knn_models:
# Get corresponding behavior and normalize prior to checking knn
behavior = (behaviors[genome_id]-behavior_min)/(behavior_max-behavior_min)
# Get average knn distance
average_distance = self.KNN_distance(behavior, knn_model)
# Add average distance to fitness. The more novel a genome, the higher its average knn distance, and the higher its fitness
fitness += average_distance
            # Set genome fitness
genome.fitness = fitness
# print('fitness: ', genome.fitness)
def KNN_distance(self, behavior, knn_model):
"""
Returns average distance of a behavior in a given knn model
"""
behavior = np.array(behavior)
distances, indices = knn_model.kneighbors([behavior])
average_distance = sum(distances[0])/len(distances[0])
return average_distance
def normalizeData(self, data):
"""
Normalizes data according to X_norm = (X - X_min)/(X_max-X_min)
"""
# Transpose data
data = data.T
# Get shape
shape = data.shape
# The number of features is taken as the smaller dimension
number_of_dimensions = min(shape)
X = np.split(data, number_of_dimensions)
# Normalize each feature separately
for index, feature_data in enumerate(X):
print('feature data shape', feature_data.shape)
feature_data_min = np.amin(feature_data)
feature_data_max = np.amax(feature_data)
X[index] = (feature_data - feature_data_min)/(feature_data_max - feature_data_min)
#Concatenate X back together
X = np.concatenate(X)
#Transpose data back
X = X.T
return X
``` |
{
"source": "12MegaA21/LG-FedAvg",
"score": 2
} |
#### File: LG-FedAvg/models/Update.py
```python
import torch
from torch import nn
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
import math
import pdb
class DatasetSplit(Dataset):
def __init__(self, dataset, idxs):
self.dataset = dataset
self.idxs = list(idxs)
def __len__(self):
return len(self.idxs)
def __getitem__(self, item):
image, label = self.dataset[self.idxs[item]]
return image, label
class LocalUpdate(object):
def __init__(self, args, dataset=None, idxs=None, pretrain=False):
self.args = args
self.loss_func = nn.CrossEntropyLoss()
self.selected_clients = []
self.ldr_train = DataLoader(DatasetSplit(dataset, idxs), batch_size=self.args.local_bs, shuffle=True)
self.pretrain = pretrain
def train(self, net, idx=-1, lr=0.1):
net.train()
# train and update
optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.5)
epoch_loss = []
if self.pretrain:
local_eps = self.args.local_ep_pretrain
else:
local_eps = self.args.local_ep
for iter in range(local_eps):
batch_loss = []
for batch_idx, (images, labels) in enumerate(self.ldr_train):
images, labels = images.to(self.args.device), labels.to(self.args.device)
net.zero_grad()
log_probs = net(images)
loss = self.loss_func(log_probs, labels)
loss.backward()
optimizer.step()
batch_loss.append(loss.item())
epoch_loss.append(sum(batch_loss)/len(batch_loss))
return net.state_dict(), sum(epoch_loss) / len(epoch_loss)
class LocalUpdateMTL(object):
def __init__(self, args, dataset=None, idxs=None, pretrain=False):
self.args = args
self.loss_func = nn.CrossEntropyLoss()
self.selected_clients = []
self.ldr_train = DataLoader(DatasetSplit(dataset, idxs), batch_size=self.args.local_bs, shuffle=True)
self.pretrain = pretrain
def train(self, net, lr=0.1, omega=None, W_glob=None, idx=None, w_glob_keys=None):
net.train()
# train and update
optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.5)
epoch_loss = []
if self.pretrain:
local_eps = self.args.local_ep_pretrain
else:
local_eps = self.args.local_ep
for iter in range(local_eps):
batch_loss = []
for batch_idx, (images, labels) in enumerate(self.ldr_train):
images, labels = images.to(self.args.device), labels.to(self.args.device)
net.zero_grad()
log_probs = net(images)
loss = self.loss_func(log_probs, labels)
W = W_glob.clone()
W_local = [net.state_dict(keep_vars=True)[key].flatten() for key in w_glob_keys]
W_local = torch.cat(W_local)
W[:, idx] = W_local
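                # Multi-task regularizer: ||W||^2 + trace(W * Omega * W^T),
                # evaluated over row chunks of size k to bound memory, then
                # scaled down by 10**(-f) based on the parameter count.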
loss_regularizer = 0
loss_regularizer += W.norm() ** 2
k = 4000
for i in range(W.shape[0] // k):
x = W[i * k:(i+1) * k, :]
loss_regularizer += x.mm(omega).mm(x.T).trace()
                f = int(math.log10(W.shape[0]) + 1) + 1
loss_regularizer *= 10 ** (-f)
loss = loss + loss_regularizer
loss.backward()
optimizer.step()
batch_loss.append(loss.item())
epoch_loss.append(sum(batch_loss)/len(batch_loss))
return net.state_dict(), sum(epoch_loss) / len(epoch_loss)
``` |
{
"source": "12moi/News-App",
"score": 3
} |
#### File: News-App/app/request.py
```python
import urllib.request, json
from flask.templating import render_template
from app.models import Article,Source
import datetime,timeago
api_key = None
category_url = None
source_url = None
headline_url = None
image=None
date_time_readable=None
def configure_request(app):
'''
this is a function that will configure the requests
'''
global api_key, source_url, category_url,headline_url
api_key = app.config['NEWS_API_KEY']
category_url = app.config['CATEGORY_URL']
source_url = app.config['SOURCE_URL']
headline_url = app.config['HEADLINE_URL']
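# The Flask app config is expected to supply NEWS_API_KEY together with the
# CATEGORY_URL, SOURCE_URL and HEADLINE_URL templates used by the getters below.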
def get_headlines():
'''
this is a function that will get the json response for the headlines
'''
get_headline_url = headline_url.format(api_key)
with urllib.request.urlopen(get_headline_url) as url:
get_headline_data = url.read()
get_headline_response = json.loads(get_headline_data)
headline_results = None
if get_headline_response['articles']:
headline_results_list = get_headline_response['articles']
headline_results = process_headline_results(headline_results_list)
return headline_results
def process_headline_results(headline_list):
'''
Function that processes the headline result and transform them to a list of Objects
'''
headline_results = []
for headline_item in headline_list:
image = headline_item.get('urlToImage')
title = headline_item.get ('title')
author = headline_item.get('author')
description = headline_item.get('description')
publishedAt = headline_item.get('publishedAt')
url = headline_item.get('url')
urlToImage = headline_item.get('urlToImage')
date_time_readable = datetime.datetime.strptime(publishedAt, '%Y-%m-%dT%H:%M:%SZ')
now = datetime.datetime.now() + datetime.timedelta(seconds = 60 * 3.4)
if urlToImage:
if description:
if publishedAt:
headline_object = Article(title,author,description,publishedAt,url,urlToImage)
headline_results.append(headline_object)
return headline_results
def get_category(category):
'''
this is a function that gets the json response to our url for request
'''
get_category_url = category_url.format(category, api_key)
with urllib.request.urlopen(get_category_url) as url:
get_category_data = url.read()
get_category_response = json.loads(get_category_data)
category_results = None
if get_category_response['articles']:
category_results_list = get_category_response['articles']
category_results = process_headline_results(category_results_list)
return category_results
def get_source():
'''
this is a function that gets the json response to our source request
'''
get_source_url = source_url.format(api_key)
with urllib.request.urlopen(get_source_url) as url:
get_source_article_data = url.read()
get_source_article_response = json.loads(get_source_article_data)
        source_results = None
        if get_source_article_response['sources']:
            source_results_list = get_source_article_response['sources']
            source_results = process_source_results(source_results_list)
        return source_results
def process_source_results(source_list):
'''
this is a function that will process the source result and then change them to a list of Objects
'''
source_results= []
for source_item in source_list:
id = source_item.get('id')
name = source_item.get('name')
url = source_item.get('url')
description = source_item.get('description')
language = source_item.get('language')
category= source_item.get('category')
country = source_item.get('country')
if url:
source_object = Source(id,name,url,description,category,country,language,)
source_results.append(source_object)
return source_results
def get_source_articles(id):
'''
this is a function that gets the json response to our source request
'''
get_source_article_url = 'https://newsapi.org/v2/top-headlines?sources={}&apiKey={}'.format(id,api_key)
with urllib.request.urlopen(get_source_article_url) as url:
get_source_data = url.read()
get_source_response = json.loads(get_source_data)
source_results = None
if get_source_response['articles']:
source_results_list =get_source_response['articles']
source_results = process_source_article_results(source_results_list)
return source_results
def process_source_article_results(headline_list):
'''
    this is a function that processes source results and transforms them into an object list
'''
headline_results=[]
for headline_item in headline_list:
urlToImage = headline_item.get('urlToImage')
title = headline_item.get ('title')
author = headline_item.get('author')
description = headline_item.get('description')
publishedAt = headline_item.get('publishedAt')
language = headline_item.get('language')
category = headline_item.get('category')
country= headline_item.get('country')
url = headline_item.get('url')
if urlToImage:
if description:
if publishedAt:
headline_object = Article(title,author,description,publishedAt,url, urlToImage)
headline_results.append(headline_object)
return headline_results
def search_topic(topic_name):
'''
this function will get the json response to our source request
'''
search_topic_url = 'https://newsapi.org/v2/everything?apiKey={}&q={}'.format(api_key,topic_name)
with urllib.request.urlopen(search_topic_url) as url:
search_topic_data = url.read()
search_topic_response = json.loads(search_topic_data)
search_topic_results = None
if search_topic_response['articles']:
search_topic_list = search_topic_response['articles']
search_topic_results = process_headline_results((search_topic_list))
return search_topic_results
else:
return render_template('notfound.html')
``` |
{
"source": "12moi/password-locker",
"score": 4
} |
#### File: 12moi/password-locker/user.py
```python
class User:
'''
class that generates a new user instance
'''
# Empty user list array
user_list=[]
def __init__(self,firstname,lastname, username, userpassword):
self.username=username
self.firstname=firstname
self.lastname=lastname
        self.password = userpassword
def save_user(self):
'''
save_user method saves a new user objects to the user_list
'''
User.user_list.append(self)
@classmethod
    def display_user(cls):
return cls.user_list
def delete_user(self):
'''
A method that deletes a saved account from the list
'''
        User.user_list.remove(self)
    @classmethod
    def verify_user(cls, username, password):
        '''
        A method that verifies that a user with matching credentials exists in the user_list
        '''
        a_user = ""
        for user in cls.user_list:
            if user.username == username and user.password == password:
                a_user = user.username
        return a_user
class Credentials():
'''
Create credentials class to help create new objects of credentials
'''
    acounts = []
    credentials_list = []
    def __init__(self,accountname,accountusername, accountpassword):
        '''
        a method that defines the user credentials to be saved
        '''
self.accountname=accountname
self.accountusername=accountusername
self.accountpassword=accountpassword
def save_account(self):
'''
this is a method that saves Accounts information
'''
Credentials.acounts.append(self)
def delete_account(self):
'''
Deletes saved account credentials
'''
Credentials.acounts.remove(self)
@classmethod
def display_accounts(cls):
'''
this method returns the accounts list
'''
        return cls.acounts
@classmethod
def find_by_username(cls,username):
'''
This method takes in a number and finds a contact that matches the number
'''
for account in cls.acounts:
if account.accountusername==username:
return account
def save_credentials(self):
'''
save_user method saves a new user objects to the user_list
'''
Credentials.credentials_list.append(self)
    def delete_credentials(self):
'''
A method that deletes a saved account from the list
'''
Credentials.credentials_list.remove(self)
@classmethod
def find_credentials(cls, account):
'''
        method that takes an account name and retrieves the credentials for that account
'''
for credential in cls.credentials_list:
            if credential.accountname == account:
return credential
@classmethod
def display_credentials(cls):
'''
A method that returns all the items in the credentials list
'''
return cls.credentials_list
``` |
{
"source": "12ok/python_train",
"score": 3
} |
#### File: python_train/generator/contact.py
```python
from model.contact import Contact
import string
import random
import re
import getopt
import sys
import os.path
import jsonpickle
try:
opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of contacts", "file"])
except getopt.GetoptError as err:
    # getopt has no usage() helper; report the parsing error and abort
    print(err)
    sys.exit(2)
n = 5
f = "data/contacts.json"
for o, a in opts:
if o == "-n":
n = int(a)
elif o == "-f":
f = a
def clear_end_spase(s):
return re.sub(" $", "", s)
def random_string_alpha(prefix, maxlen):
symbols = string.ascii_letters + " "
return clear_end_spase(prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))]))
def random_string_digit(prefix, maxlen):
symbols = string.digits + " +()-"
return clear_end_spase(prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))]))
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "
return clear_end_spase(prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))]))
testdata = [Contact(firstname="", middlename="", lastname="", nik="", title="", company="", address="", home="",
mobile="", work="", fax="", email="", email2="", email3="", page="", address2="", phone2="",
notes="")] + [
Contact(firstname=random_string_alpha("name", 5), middlename=random_string_alpha("middlename", 10),
lastname=random_string_alpha("lastname", 10), nik=random_string("nik", 5),
title=random_string_alpha("", 2),
company=random_string("", 8), address=random_string("", 10), home=random_string_digit("hp", 11),
mobile=random_string_digit("mp", 8), work=random_string_digit("wp", 7),
fax=random_string_digit("f", 5),
email=random_string("e1", 10), email2=random_string("e2", 15), email3=random_string("e3", 20),
page=random_string("page", 15), address2=random_string("", 10),
phone2=random_string_digit("2p", 12), notes=random_string("", 10)) for i in range(5)]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w")as out:
jsonpickle.set_encoder_options("json", indent=2)
out.write(jsonpickle.encode(testdata))
```
#### File: python_train/test/test_contact_db_matches_ui.py
```python
from model.contact import Contact
import re
def test_all_contacts_from_home_page_to_db(app, db):
contacts_homepage = sorted(app.contact.get_contact_list(), key=Contact.id_or_max, )
contacts_db = sorted(db.get_contact_list_full(), key=Contact.id_or_max)
    # Check that the number of contacts shown in the UI matches the number stored in the DB
assert len(contacts_homepage) == len(contacts_db)
for i in range(len(contacts_homepage)):
assert contacts_homepage[i].firstname == clear_end_spase(contacts_db[i].firstname)
assert contacts_homepage[i].lastname == clear_end_spase(contacts_db[i].lastname)
assert contacts_homepage[i].address == clear_end_spase(contacts_db[i].address)
assert contacts_homepage[i].all_emails == merge_emails_like_on_home_page(contacts_db[i])
assert contacts_homepage[i].all_phones_from_home_page == merge_phones_like_on_home_page(
contacts_db[i])
def clear(s):
return clear_end_spase(re.sub("[() -]", "", s))
def clear_end_spase(s):
return re.sub(" $", "", s)
def merge_phones_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.home, contact.mobile, contact.work, contact.phone2]))))
def merge_emails_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
filter(lambda x: x is not None,
[contact.email, contact.email2, contact.email3])))
```
#### File: python_train/test/test_modify_group.py
```python
import random
import re
from model.group import Group
def clear(group):
return Group(id=group.id, name=re.sub(" +", " ", group.name).strip())
def test_modify_group_name(app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(name="for test"))
old_groups = db.get_group_list()
old_group = random.choice(old_groups)
new_group = Group(id=old_group.id, name="new group")
app.group.modify_group_by_id(new_group, old_group.id)
new_groups = db.get_group_list()
for i in range(len(old_groups)):
if old_groups[i].id == old_group.id:
old_groups[i] = new_group
assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
if check_ui:
assert sorted(map(clear, new_groups), key=Group.id_or_max) == sorted(app.group.get_group_list(),
key=Group.id_or_max)
def test_modify_group_header(app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(header="for test"))
old_groups = db.get_group_list()
old_group = random.choice(old_groups)
new_group = Group(id=old_group.id, name=old_group.name, header="new header")
app.group.modify_group_by_id(new_group, old_group.id)
new_groups = db.get_group_list()
for i in range(len(old_groups)):
if old_groups[i].id == old_group.id:
old_groups[i] = new_group
assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
if check_ui:
assert sorted(map(clear, new_groups), key=Group.id_or_max) == sorted(app.group.get_group_list(),
key=Group.id_or_max)
def test_modify_group_footer(app, db, check_ui):
if len(db.get_group_list()) == 0:
app.group.create(Group(footer="for test"))
old_groups = db.get_group_list()
old_group = random.choice(old_groups)
new_group = Group(id=old_group.id, name=old_group.name, footer="new header")
app.group.modify_group_by_id(new_group, old_group.id)
new_groups = db.get_group_list()
for i in range(len(old_groups)):
if old_groups[i].id == old_group.id:
old_groups[i] = new_group
    assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
if check_ui:
assert sorted(map(clear, new_groups), key=Group.id_or_max) == sorted(app.group.get_group_list(),
key=Group.id_or_max)
``` |
{
"source": "12rambau/alos_mosaics",
"score": 2
} |
#### File: component/tile/export.py
```python
import ipyvuetify as v
from sepal_ui import sepalwidgets as sw
from sepal_ui.scripts import utils as su
from component import scripts
from component.message import ms
from component import parameter as pm
# the tiles should all inherit from the sepal_ui Tile object
# if you want to create an extra reusable object, you can define it in an extra widget.py file
class ExportTile(sw.Tile):
def __init__(self, aoi_model, model, **kwargs):
# gather the model
self.aoi_model = aoi_model
self.model = model
# create an output alert
self.output = sw.Alert()
        # create the layer selection switches
self.backscatter = v.Switch(
class_="ml-5", label=ms.export.backscatter, v_model=True
)
self.rfdi = v.Switch(class_="ml-5", label=ms.export.rfdi, v_model=True)
self.texture = v.Switch(class_="ml-5", label=ms.export.texture, v_model=False)
self.aux = v.Switch(class_="ml-5", label=ms.export.aux, v_model=False)
self.fnf = v.Switch(class_="ml-5", label=ms.export.fnf, v_model=False)
self.scale = v.TextField(label=ms.export.scale, v_model=25)
# create buttons
self.asset_btn = sw.Btn(
ms.export.asset_btn, "mdi-download", disabled=True, class_="ma-5"
)
self.sepal_btn = sw.Btn(
ms.export.sepal_btn, "mdi-download", disabled=True, class_="ma-5"
)
# bindings
self.model.bind(self.backscatter, "backscatter").bind(self.rfdi, "rfdi").bind(
self.texture, "texture"
).bind(self.aux, "aux").bind(self.fnf, "fnf").bind(self.scale, "scale")
        # note that btn and output are not mandatory attributes
super().__init__(
id_="export_widget",
title=ms.export.title,
inputs=[
self.backscatter,
self.rfdi,
self.texture,
self.aux,
self.fnf,
self.scale,
],
alert=sw.Alert(),
btn=v.Layout(row=True, children=[self.asset_btn, self.sepal_btn]),
)
# decorate each function as we are using multiple btns
self._on_asset_click = su.loading_button(
self.alert, self.asset_btn, debug=False
)(self._on_asset_click)
self._on_sepal_click = su.loading_button(
self.alert, self.sepal_btn, debug=False
)(self._on_sepal_click)
# link the btn
self.asset_btn.on_event("click", self._on_asset_click)
self.sepal_btn.on_event("click", self._on_sepal_click)
def _select_layers(self):
dataset = None
if self.model.backscatter:
dataset = self.model.dataset.select(["HH", "HV", "HHHV_ratio"])
if self.model.rfdi:
if dataset:
dataset = dataset.addBands(self.model.dataset.select(["RFDI"]))
else:
dataset = self.model.dataset.select(["RFDI"])
if self.model.texture:
if dataset:
dataset = dataset.addBands(
self.model.dataset.select(
["HH_var", "HH_idm", "HH_diss", "HV_var", "HV_idm", "HV_diss"]
)
)
else:
dataset = self.model.dataset.select(
["HH_var", "HH_idm", "HH_diss", "HV_var", "HV_idm", "HV_diss"]
)
if self.model.aux:
if dataset:
dataset = dataset.addBands(
self.model.dataset.select(["angle", "date", "qa"])
)
else:
dataset = self.model.dataset.select(["angle", "date", "qa"])
fnf_dataset = None
if self.model.fnf and int(str(self.model.year)) <= 2017:
fnf_dataset = self.model.dataset.select(f"fnf_{self.model.year}")
return dataset, fnf_dataset
def _on_asset_click(self, widget, data, event):
dataset, fnf_dataset = self._select_layers()
# export the results
if dataset:
asset_id = scripts.export_to_asset(
self.aoi_model,
dataset,
pm.asset_name(self.aoi_model, self.model),
self.model.scale,
self.alert,
)
if fnf_dataset:
asset_id = scripts.export_to_asset(
self.aoi_model,
fnf_dataset,
pm.asset_name(self.aoi_model, self.model, True),
self.model.scale,
self.alert,
)
return
def _on_sepal_click(self, widget, data, event):
# get selected layers
dataset, fnf_dataset = self._select_layers()
if dataset:
# export the results
pathname = scripts.export_to_sepal(
self.aoi_model,
dataset,
pm.asset_name(self.aoi_model, self.model),
self.model.scale,
self.alert,
)
if fnf_dataset:
# export the results
pathname = scripts.export_to_sepal(
self.aoi_model,
fnf_dataset,
pm.asset_name(self.aoi_model, self.model, True),
self.model.scale,
self.alert,
)
return
``` |
{
"source": "12rambau/clip-time-series",
"score": 3
} |
#### File: component/parameter/pdf.py
```python
from itertools import product
def get_dims(N):
"""
I'm gonna check every combination from 1 to 20 lines and columns.
400 year of data max, I'll have a good life before anyone complains
"""
# A4 format in landscape
width = 11.69
    height = 8.27
    cols, lines = (None, None)
    l = 0
    for nb_col, nb_line in product(range(1, 21), range(1, 21)):
        l_tmp = min(width / nb_col, height / nb_line)
if l_tmp > l and nb_col * nb_line > N:
l = l_tmp
cols = nb_col
lines = nb_line
return (cols, lines)
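# Worked example: for N = 9 yearly panels, get_dims(9) returns (4, 3), since a
# 4x3 grid holds all panels and maximizes the side min(11.69/4, 8.27/3) ~ 2.76 in.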
def getPositionPdf(i, nb_col):
"""Return the position of the square on the pdf page"""
return [int(i / nb_col), i % nb_col]
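# Usage sketch: with a 4-column grid, the 6th image (i=5) lands on line 1,
# column 1, i.e. getPositionPdf(5, 4) == [1, 1].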
``` |
{
"source": "12rambau/clip-time-series-polygons",
"score": 2
} |
#### File: 12rambau/clip-time-series-polygons/scripts.py
```python
from pathlib import Path
import ee
from datetime import datetime as dt
from urllib.request import urlretrieve
import zipfile
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import rasterio as rio
import numpy as np
import gdal
from utils import *
from parameters import *
ee.Initialize()
def createPDF(file, df, raw_polygons, bands, sources, output):
start = dt.now().replace(microsecond=0)
# get the filename
filename = Path(file).stem
# extract the bands to use them in names
name_bands = '_'.join(bands.split(', '))
# pdf file
pdf_file = result_dir/f'{filename}_{name_bands}.pdf'
if pdf_file.is_file():
        output.add_live_msg('PDF already exists', 'success')
return pdf_file
# create a filename list
descriptions = {}
for year in range(start_year, end_year + 1):
descriptions[year] = {}
for index, row in df.iterrows():
descriptions[year][row['id']] = f'{filename}_{name_bands}_{year}_pt_{row.id}'
# load all the data in gdrive
satellites = {} # contain the names of the used satellites
task_list = []
for year in range(start_year, end_year + 1):
for index, row in df.iterrows():
# launch it only if the file is not in tmp, or in gdrive
task_name = descriptions[year][row['id']]
dst = tmp_dir/f'{task_name}.tif'
image, satellites[year] = getImage(sources, bands, row['ee_geometry'], year)
output.add_msg('exporting year {} for point {}'.format(year, row['id']))
if not dst.is_file():
name = f'{task_name}_zipimage'
link = image.getDownloadURL({
'name': name,
'region': row.ee_geometry,
'filePerBand': False,
'scale': getScale(satellites[year])
})
tmp = tmp_dir/f'{name}.zip'
urlretrieve (link, tmp)
# unzip the file
with zipfile.ZipFile(tmp,"r") as zip_:
data = zip_.read(zip_.namelist()[0])
dst.write_bytes(data)
# remove the zip
tmp.unlink()
# create the resulting pdf
with PdfPages(pdf_file) as pdf:
# each point is display on one single page
for index, row in df.iterrows():
page_title = f'Polygon_{row.id} ({row.name})'
output.add_msg(f'Creating pages for pt {row.id}')
nb_col, nb_line = get_dims(end_year-start_year)
fig, axes = plt.subplots(nb_line, nb_col, figsize=(11.69,8.27), dpi=500)
fig.suptitle(page_title, fontsize=16, fontweight ="bold")
# display the images in a fig and export it as a pdf page
cpt = 0
for year in range(start_year, end_year + 1):
                # load the file
file = tmp_dir/f'{descriptions[year][row.id]}.tif'
with rio.open(file) as f:
data = f.read([1, 2, 3], masked=True)
x_min, y_min, x_max, y_max = list(f.bounds)
bands = []
for i in range(3):
band = data[i]
h_, bin_ = np.histogram(band[np.isfinite(band)].flatten(), 3000, density=True) #remove the NaN from the analysis
cdf = h_.cumsum() # cumulative distribution function
cdf = 3000 * cdf / cdf[-1] # normalize
# use linear interpolation of cdf to find new pixel values
band_equalized = np.interp(band.flatten(), bin_[:-1], cdf)
band_equalized = band_equalized.reshape(band.shape)
bands.append(band_equalized)
data = np.stack( bands, axis=0 )
data = data/3000
data = data.clip(0, 1)
data = np.transpose(data,[1,2,0])
x_polygon, y_polygon = raw_polygons.loc[index]['geometry'].exterior.coords.xy
ax = axes[getPositionPdf(cpt, nb_col)[0], getPositionPdf(cpt, nb_col)[1]]
ax.imshow(data, interpolation='nearest', extent=[x_min, x_max, y_min, y_max])
ax.plot(x_polygon, y_polygon, color=polygon_color, linewidth=polygon_width)
ax.set_title(f'{year} {getShortname(satellites[year])}', x=.0, y=.9, fontsize='small', backgroundcolor='white', ha='left')
ax.axis('off')
ax.set_aspect('equal', 'box')
cpt += 1
# remove the file
file.unlink()
# finish the line with empty plots
while cpt < nb_line*nb_col:
ax = axes[getPositionPdf(cpt, nb_col)[0], getPositionPdf(cpt, nb_col)[1]]
ax.axis('off')
ax.set_aspect('equal', 'box')
cpt += 1
# save the page
plt.tight_layout()
pdf.savefig(fig)
plt.close()
elapsed_time = dt.now().replace(microsecond=0)-start
output.add_live_msg(f'PDF output finished in {elapsed_time}', 'success')
return pdf_file
```
#### File: 12rambau/clip-time-series-polygons/utils.py
```python
from shapely.geometry import Point
from math import sqrt
from pathlib import Path
from datetime import datetime
from itertools import product
import ee
from parameters import square_size
ee.Initialize()
#########################
#### constants ###
#########################
end_year = datetime.now().year
sources = ['landsat', 'sentinel']
##############################
##### folders ###
##############################
result_dir = Path.home()/'clip_results'
result_dir.mkdir(parents=True, exist_ok=True)
tmp_dir = Path.home()/'tmp'
# no need to create it; it is initialized when SEPAL starts an instance
########################
## functions ##
########################
def get_dims(N):
    '''
    Return the (columns, lines) grid that maximizes panel size on an A4 landscape page.

    Every combination from 1 to 20 columns and lines is tested, i.e. up to 400
    panels, far more than a yearly time series will ever need.
    '''
# A4 format in landscape
width = 11.69
    height = 8.27
    cols, lines = (None, None)
    l = 0
    for nb_col, nb_line in product(range(1, 21), range(1, 21)):
        l_tmp = min(width/nb_col, height/nb_line)
if l_tmp > l and nb_col*nb_line > N:
l = l_tmp
cols = nb_col
lines = nb_line
return (cols, lines)
def to_square(polygon):
minx, miny, maxx, maxy = polygon.bounds
# min size in latitude (appro)
min_size = square_size/111
# get the centroid
centroid = [(maxx+minx)/2, (maxy+miny)/2]
# get the diagonal
diagonal = max(min_size, sqrt((maxx-minx)**2+(maxy-miny)**2))
return Point(centroid).buffer(diagonal/2, cap_style=3)
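# Worked example (hypothetical square_size = 2 km): a tiny polygon yields
# min_size = 2/111 ~ 0.018 deg, and buffer(diagonal/2, cap_style=3) returns an
# axis-aligned square of side ~ 0.018 deg centered on the bounds' centroid.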
##########################
####   satellite inputs   ##
##########################
def getPositionPdf(i, nb_col):
"""Return the position of the square on the pdf page"""
return [int(i/nb_col), i%nb_col]
def getSatellites(sources):
satellites = {}
if 'sentinel' in sources:
satellites.update({'sentinel_2': 'COPERNICUS/S2_SR'})
if 'landsat' in sources:
satellites.update({
'landsat_8': 'LANDSAT/LC08/C01/T1_SR',
'landsat_5': 'LANDSAT/LT05/C01/T1_SR',
'landsat_7': 'LANDSAT/LE07/C01/T1_SR',
})
return satellites
def getScale(satellite):
scale = {
'sentinel_2': 10,
'landsat_5': 30,
'landsat_7': 30,
'landsat_8': 30
}
return scale[satellite]
def getShortname(satellite):
short = {
'sentinel_2': 'S2',
'landsat_5': 'L5',
'landsat_7': 'L7',
'landsat_8': 'L8'
}
return short[satellite]
def getAvailableBands():
"""give the bands composition for each name.
0 being the landsat 7,
1 landsat 5,
2, landsat 8
3: sentinel 2"""
bands = {
'Red, Green, Blue' : {
'landsat_7': ['B3', 'B2', 'B1'],
'landsat_5': ['B3', 'B2', 'B1'],
'landsat_8': ['B4', 'B3', 'B2'],
'sentinel_2': ['B4', 'B3', 'B2']
},
'Nir, Red, Green' : {
'landsat_7': ['B4', 'B3', 'B2'],
'landsat_5': ['B4', 'B3', 'B2'],
'landsat_8': ['B5', 'B4', 'B3'],
'sentinel_2': ['B8', 'B4', 'B3']
},
'Nir, Swir1, Red' : {
'landsat_7': ['B4', 'B5', 'B3'],
'landsat_5': ['B4', 'B5', 'B3'],
'landsat_8': ['B5', 'B6', 'B4'],
'sentinel_2': ['B8', 'B11', 'B4']
},
'Swir2, Nir, Red' : {
'landsat_7': ['B7', 'B4', 'B3'],
'landsat_5': ['B7', 'B4', 'B3'],
'landsat_8': ['B7', 'B5', 'B4'],
'sentinel_2': ['B12', 'B8', 'B4']
},
'Swir2, Swir1, Red' : {
'landsat_7': ['B7', 'B5', 'B3'],
'landsat_5': ['B7', 'B5', 'B3'],
'landsat_8': ['B7', 'B6', 'B4'],
'sentinel_2': ['B12', 'B11', 'B4']
},
'Swir2, Nir, Green' : {
'landsat_7': ['B7', 'B4', 'B2'],
'landsat_5': ['B7', 'B4', 'B2'],
'landsat_8': ['B7', 'B5', 'B3'],
'sentinel_2': ['B12', 'B8', 'B3']
},
'ndvi' : { # 2 useful bands nir and red
'landsat_7': ['B4', 'B3'],
'landsat_5': ['B4', 'B3'],
'landsat_8': ['B5', 'B4'],
'sentinel_2': ['B8', 'B4']
},
'ndwi' : { # 2 useful bands nir and swir
'landsat_7': ['B4', 'B5'],
'landsat_5': ['B4', 'B5'],
'landsat_8': ['B5', 'B6'],
'sentinel_2': ['B8', 'B11']
}
}
return bands
def getCloudMask(satelliteId):
""" return the cloud masking function adapted to the apropriate satellite"""
if satelliteId in ['landsat_5', 'landsat_7']:
def cloudMask(image):
qa = image.select('pixel_qa')
# If the cloud bit (5) is set and the cloud confidence (7) is high
# or the cloud shadow bit is set (3), then it's a bad pixel.
cloud = qa.bitwiseAnd(1 << 5).And(qa.bitwiseAnd(1 << 7)).Or(qa.bitwiseAnd(1 << 3))
# Remove edge pixels that don't occur in all bands
mask2 = image.mask().reduce(ee.Reducer.min())
return image.updateMask(cloud.Not()).updateMask(mask2)
elif satelliteId == 'landsat_8':
def cloudMask(image):
# Bits 3 and 5 are cloud shadow and cloud, respectively.
cloudShadowBitMask = (1 << 3)
cloudsBitMask = (1 << 5)
# Get the pixel QA band.
qa = image.select('pixel_qa')
# Both flags should be set to zero, indicating clear conditions.
mask = qa.bitwiseAnd(cloudShadowBitMask).eq(0).And(qa.bitwiseAnd(cloudsBitMask).eq(0))
return image.updateMask(mask)
elif satelliteId == 'sentinel_2':
def cloudMask(image):
qa = image.select('QA60')
# Bits 10 and 11 are clouds and cirrus, respectively.
cloudBitMask = (1 << 10)
cirrusBitMask = (1 << 11)
# Both flags should be set to zero, indicating clear conditions.
mask = qa.bitwiseAnd(cloudBitMask).eq(0).And(qa.bitwiseAnd(cirrusBitMask).eq(0))
return image.updateMask(mask)#.divide(10000)
return cloudMask
def getImage(sources, bands, mask, year):
start = str(year) + '-01-01'
end = str(year) + '-12-31'
# priority selector for satellites
for satelliteId in getSatellites(sources):
dataset = ee.ImageCollection(getSatellites(sources)[satelliteId]) \
.filterDate(start, end) \
.filterBounds(mask) \
.map(getCloudMask(satelliteId))
        # keep the first satellite (in priority order) that has available scenes
        if dataset.size().getInfo() > 0:
            break
clip = dataset.median().clip(mask).select(getAvailableBands()[bands][satelliteId])
return (clip, satelliteId)
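# Usage sketch (geom stands for any ee.Geometry, values are illustrative):
# image, sat = getImage(['landsat'], 'Red, Green, Blue', geom, 2019)
# returns the median RGB composite clipped to geom and the id of the first
# satellite (in priority order) with available scenes.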
``` |
{
"source": "12rambau/commitizen",
"score": 2
} |
#### File: commitizen/commands/schema.py
```python
from commitizen import factory, out
from commitizen.config import BaseConfig
class Schema:
"""Show structure of the rule."""
def __init__(self, config: BaseConfig, *args):
self.config: BaseConfig = config
self.cz = factory.commiter_factory(self.config)
def __call__(self):
out.write(self.cz.schema())
```
#### File: commitizen/config/base_config.py
```python
from pathlib import Path
from typing import Any, Dict, Optional, Union
from commitizen.defaults import DEFAULT_SETTINGS
class BaseConfig:
def __init__(self):
self._settings: Dict[str, Any] = DEFAULT_SETTINGS.copy()
self._path: Optional[Path] = None
@property
def settings(self) -> Dict[str, Any]:
return self._settings
@property
def path(self) -> Optional[Path]:
return self._path
def set_key(self, key, value):
"""Set or update a key in the conf.
For now only strings are supported.
        We use it to update the version number.
"""
raise NotImplementedError()
def update(self, data: dict):
self._settings.update(data)
def add_path(self, path: Union[str, Path]):
self._path = Path(path)
def _parse_setting(self, data: Union[bytes, str]) -> dict:
raise NotImplementedError()
```
#### File: commitizen/tests/conftest.py
```python
import pytest
from commitizen import cmd
@pytest.fixture(scope="function")
def tmp_git_project(tmpdir):
with tmpdir.as_cwd():
cmd.run("git init")
yield tmpdir
@pytest.fixture(scope="function")
def tmp_commitizen_project(tmp_git_project):
with tmp_git_project.as_cwd():
tmp_commitizen_cfg_file = tmp_git_project.join("pyproject.toml")
tmp_commitizen_cfg_file.write("[tool.commitizen]\n" 'version="0.1.0"\n')
yield tmp_git_project
```
#### File: commitizen/tests/utils.py
```python
import uuid
from pathlib import Path
from typing import Optional
from commitizen import cmd, git
class FakeCommand:
def __init__(self, out=None, err=None, return_code=0):
self.out = out
self.err = err
self.return_code = return_code
def create_file_and_commit(message: str, filename: Optional[str] = None):
if not filename:
filename = str(uuid.uuid4())
Path(f"./{filename}").touch()
cmd.run("git add .")
git.commit(message)
``` |
{
"source": "12rambau/coverage_analysis",
"score": 2
} |
#### File: component/scripts/bfast_preanalysis.py
```python
import time
import numpy as np
import pandas as pd
import ee
import ipyvuetify as v
from matplotlib import pyplot as plt
from component.message import ms
from component import parameter as pm
from .helpers import *
from .cloud_masking import cloud_mask_S2, cloud_mask_S2_SR
ee.Initialize()
def analysis(aoi, start, end, sensors, t2, sr):
coll = None
coll_type = "SR" if sr else "TOA"
if "l8" in sensors:
# create collection (with masking) and add NDVI
coll = create_collection(
ee.ImageCollection(f"LANDSAT/LC08/C01/T1_{coll_type}"),
t2,
start,
end,
aoi,
sr,
).map(addNDVIL8)
if "l7" in sensors:
# create collection (with masking) and add NDVI
l7_coll = create_collection(
ee.ImageCollection(f"LANDSAT/LE07/C01/T1_{coll_type}"),
t2,
start,
end,
aoi,
sr,
).map(addNDVILsat)
# merge collection
coll = coll.merge(l7_coll) if coll else l7_coll
if "l5" in sensors:
# create collection (with masking) and add NDVI
l5_coll = create_collection(
ee.ImageCollection(f"LANDSAT/LT05/C01/T1_{coll_type}"),
t2,
start,
end,
aoi,
sr,
).map(addNDVILsat)
# merge collection
coll = coll.merge(l5_coll) if coll else l5_coll
if "l4" in sensors:
# create collection (with masking) and add NDVI
l4_coll = create_collection(
ee.ImageCollection(f"LANDSAT/LT04/C01/T1_{coll_type}"),
t2,
start,
end,
aoi,
sr,
).map(addNDVILsat)
# merge collection
coll = coll.merge(l4_coll) if coll else l4_coll
if "s2" in sensors:
# define collection name based on SR or TOA
s2_coll_name = "S2_SR" if sr else "S2"
# Import and filter S2 SR.
s2_coll = (
ee.ImageCollection(f"COPERNICUS/{s2_coll_name}")
.filterBounds(aoi)
.filterDate(start, end)
)
# Import and filter s2cloudless.
s2_cloudless_coll = (
ee.ImageCollection("COPERNICUS/S2_CLOUD_PROBABILITY")
.filterBounds(aoi)
.filterDate(start, end)
)
# Join the filtered s2cloudless collection to the SR collection by the 'system:index' property.
joined_coll = ee.ImageCollection(
ee.Join.saveFirst("s2cloudless").apply(
**{
"primary": s2_coll,
"secondary": s2_cloudless_coll,
"condition": ee.Filter.equals(
**{"leftField": "system:index", "rightField": "system:index"}
),
}
)
)
        # use the SCL-based SR mask when available, the probability mask otherwise
        s2_coll = (
            joined_coll.map(cloud_mask_S2_SR) if sr else joined_coll.map(cloud_mask_S2)
        )
s2_coll = s2_coll.map(addNDVIS2)
# merge collection
coll = coll.merge(s2_coll) if coll else s2_coll
return coll
```
#### File: component/scripts/cloud_masking.py
```python
import ee
import math
ee.Initialize()
def cloud_mask_S2(image):
CLD_PRB_THRESH = 30
# Get s2cloudless image, subset the probability band.
cld_prb = ee.Image(image.get("s2cloudless")).select("probability")
# Condition s2cloudless by the probability threshold value.
is_not_cloud = cld_prb.lt(CLD_PRB_THRESH).rename("clouds")
return image.updateMask(is_not_cloud)
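# e.g. a pixel whose s2cloudless probability is 45 (above CLD_PRB_THRESH = 30)
# fails the lt() test and is therefore masked out of the returned image.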
def cloud_mask_S2_SR(image):
CLD_PRB_THRESH = 30
NIR_DRK_THRESH = 0.15
CLD_PRJ_DIST = 1
BUFFER = 50
# Get s2cloudless image, subset the probability band.
cld_prb = ee.Image(image.get("s2cloudless")).select("probability")
# Condition s2cloudless by the probability threshold value.
is_cloud = cld_prb.gt(CLD_PRB_THRESH).rename("clouds")
# Identify water pixels from the SCL band.
not_water = image.select("SCL").neq(6)
# Identify dark NIR pixels that are not water (potential cloud shadow pixels).
SR_BAND_SCALE = 1e4
dark_pixels = (
image.select("B8")
.lt(NIR_DRK_THRESH * SR_BAND_SCALE)
.multiply(not_water)
.rename("dark_pixels")
)
# Determine the direction to project cloud shadow from clouds (assumes UTM projection).
shadow_azimuth = ee.Number(90).subtract(
ee.Number(image.get("MEAN_SOLAR_AZIMUTH_ANGLE"))
)
# Project shadows from clouds for the distance specified by the CLD_PRJ_DIST input.
cld_proj = (
is_cloud.directionalDistanceTransform(shadow_azimuth, CLD_PRJ_DIST * 10)
.reproject(**{"crs": image.select(0).projection(), "scale": 100})
.select("distance")
.mask()
.rename("cloud_transform")
)
# Identify the intersection of dark pixels with cloud shadow projection.
shadows = cld_proj.multiply(dark_pixels).rename("shadows")
# Combine cloud and shadow mask, set cloud and shadow as value 1, else 0.
is_cld_shdw = is_cloud.add(shadows).gt(0)
# Remove small cloud-shadow patches and dilate remaining pixels by BUFFER input.
# 20 m scale is for speed, and assumes clouds don't require 10 m precision.
is_cld_shdw = (
is_cld_shdw.focal_min(2)
.focal_max(BUFFER * 2 / 20)
.reproject(**{"crs": image.select([0]).projection(), "scale": 20})
.rename("cloudmask")
)
# return image.addBands(is_cld_shdw)
return image.updateMask(is_cld_shdw.unmask(0).neq(1))
def cloudMaskLsatSR(image):
# Select the QA band.
qa = image.select("pixel_qa")
# Get the internal_cloud_algorithm_flag bit.
cloud_mask = bitwiseExtract(qa, 5).eq(0)
shadow_mask = bitwiseExtract(qa, 3).eq(0)
# Return an image masking out cloudy areas.
return image.updateMask(cloud_mask).updateMask(shadow_mask)
def cloudMaskLsatTOA(image):
# Select the QA band.
qa = image.select("BQA")
    # Get the cloud bit (4) and the cloud-shadow confidence bits (7-8);
    # keep pixels where the cloud bit is unset and shadow confidence is low
    # (mirrors the .eq(0) keep-clear logic of cloudMaskLsatSR above).
    cloud_mask = bitwiseExtract(qa, 4).eq(0)
    shadow_mask = bitwiseExtract(qa, 7, 8).lte(1)
# Return an image masking out cloudy areas.
return image.updateMask(cloud_mask).updateMask(shadow_mask)
def bitwiseExtract(value, fromBit, toBit=None):
if not toBit:
toBit = fromBit
maskSize = ee.Number(1).add(toBit).subtract(fromBit)
mask = ee.Number(1).leftShift(maskSize).subtract(1)
return value.rightShift(fromBit).bitwiseAnd(mask)
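# Worked example (conceptually, per pixel value): for qa = 0b10000000 (bit 7 set),
# bitwiseExtract(qa, 7, 8) computes (qa >> 7) & 0b11 = 1, i.e. the 2-bit
# confidence field; a single bit is read by passing fromBit only, e.g.
# bitwiseExtract(qa, 4) for the BQA cloud bit.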
``` |
{
"source": "12rambau/damage_proxy_map",
"score": 2
} |
#### File: component/io/dmp_io.py
```python
class DmpIo():
def __init__(self):
# inputs
self.event = None
self.username = None
self.password = None
```
#### File: component/tile/dmp_tile.py
```python
from sepal_ui import sepalwidgets as sw
import ipyvuetify as v
from component import widget as cw
from component.scripts import *
class DmpTile(sw.Tile):
def __init__(self, aoi_io, dmp_io):
# gather the io as class attribute
self.aoi_io = aoi_io
self.io = dmp_io
# create the widgets
self.date_picker = sw.DatePicker(label = 'Disaster event date')
self.username = v.TextField(
label = "Copernicus Scihub Username",
v_model = None
)
self.password = cw.PasswordField(label = "Copernicus Scihub Password")
# bind them with the output
self.output = sw.Alert() \
.bind(self.date_picker, self.io, 'event') \
.bind(self.username, self.io, 'username') \
.bind(self.password.text_field, self.io, 'password')
self.btn = sw.Btn("Launch the process")
# construct the tile
super().__init__(
id_ = "process_widget",
title = "Damage proxy map",
inputs = [self.date_picker, self.username, self.password],
output = self.output,
btn = self.btn
)
# link the click to an event
self.btn.on_event('click', self._on_click)
def _on_click(self, widget, data, event):
widget.toggle_loading()
if not self.output.check_input(self.aoi_io.get_aoi_name(), 'no aoi'): return widget.toggle_loading()
if not self.output.check_input(self.io.username, 'no username'): return widget.toggle_loading()
if not self.output.check_input(self.io.password, 'no password'): return widget.toggle_loading()
try:
check_computer_size(self.output)
create_dmp(self.aoi_io, self.io, self.output)
self.output.add_live_msg('Computation complete', 'success')
except Exception as e:
self.output.add_live_msg(str(e), 'error')
widget.toggle_loading()
return
```
#### File: component/widget/password.py
```python
import ipyvuetify as v
from sepal_ui import sepalwidgets as sw
from ipywidgets import jslink
class PasswordField(v.Layout, sw.SepalWidget):
EYE_ICONS = ['mdi-eye', 'mdi-eye-off']
TYPES = ['password', 'text']
def __init__(self, label = "password", **kwargs):
# set visibility status
self.password_viz = False
# create the eye icon
self.eye = v.Icon(class_ = 'ml-1', children=[self.EYE_ICONS[0]])
# create the widget
self.text_field = v.TextField(
v_model = None,
type = self.TYPES[0],
label=label
)
# create the textfield
super().__init__(
            row = True,
children = [self.text_field, self.eye],
v_model = None,
**kwargs
)
# link the icon to the display behaviour
self.eye.on_event('click', self._toggle_viz)
def _toggle_viz(self, widget, event, data):
viz = not self.password_viz
# change password viz
self.password_viz = viz
self.eye.children = [self.EYE_ICONS[viz]]
self.text_field.type = self.TYPES[viz]
return
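# Usage sketch (hypothetical label): pwd = PasswordField(label="Scihub password")
# exposes the typed value through pwd.text_field.v_model; clicking the eye icon
# toggles the underlying input between the 'password' and 'text' types.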
``` |
{
"source": "12rambau/damage_proxy_maps",
"score": 2
} |
#### File: component/scripts/process.py
```python
import os
import shutil
import re
import time
from datetime import datetime as dt
from datetime import timedelta
from pathlib import Path
from zipfile import ZipFile
import numpy as np
import pyproj
import geopandas as gpd
import fiona
from osgeo import gdal
import rasterio as rio
import rasterio.mask  # rio.mask.mask is used below and is not imported by default
from rasterio.merge import merge
from rasterio.features import shapes
import geemap
from ost import Sentinel1Batch
from ost.helpers import scihub
from component import parameter as pm
def check_computer_size():
"""check if the computer size will match the reuirements of the app"""
# we get available ram
with open("/proc/meminfo") as f:
meminfo = f.read()
matched = re.search(r"^MemTotal:\s+(\d+)", meminfo)
if matched:
mem_total_kB = int(matched.groups()[0])
# we check if available ram and cpus are enough
if mem_total_kB / 1024 / 1024 < 30 or os.cpu_count() < 4:
raise Exception(
"WARNING: You should run this notebook with an instance of at least 32Gb of Ram and 4 CPUs."
)
return
def create_dmp(aoi_model, model, output):
# create start date from 60 days before
event_date = dt.strptime(model.event, "%Y-%m-%d")
start = dt.strftime(event_date + timedelta(days=-60), "%Y-%m-%d")
end = dt.strftime((event_date + timedelta(days=+30)), "%Y-%m-%d")
# define project dir
project_dir = pm.result_dir / f"{model.event}_{aoi_model.name}"
output.add_live_msg(" Setting up project")
aoi = aoi_model.gdf.dissolve().geometry.to_wkt().values[0]
s1_slc = Sentinel1Batch(
project_dir=project_dir,
aoi=aoi,
start=start,
end=end,
product_type="SLC",
ard_type="OST-RTC",
)
# set tmp_dir
s1_slc.temp_dir = pm.tmp_dir
s1_slc.config_dict["temp_dir"] = pm.tmp_dir
## we get available ram
# with open('/proc/meminfo') as f:
# meminfo = f.read()
# matched = re.search(r'^MemTotal:\s+(\d+)', meminfo)
#
# if matched:
# mem_total_kB = int(matched.groups()[0])
#
## if we have more than 100GB ram we download there,
## that should speed up processing
# if mem_total_kB/1024/1024 > 100:
# print('Using ramdisk')
# s1_slc.download_dir = '/ram/download'
# Path(s1_slc.download_dir).mkdir(parents=True, exist_ok=True)
# s1_slc.config_dict['download_dir'] = s1_slc.download_dir
#
# get username and password
from ost.helpers.settings import HERBERT_USER
if model.username and model.password:
s1_slc.scihub_uname = model.username
s1_slc.scihub_pword = model.password
else:
s1_slc.scihub_uname = HERBERT_USER["uname"]
s1_slc.scihub_pword = HERBERT_USER["pword"]
s1_slc.asf_uname = HERBERT_USER["uname"]
s1_slc.asf_pword = HERBERT_USER["asf_pword"]
output.add_live_msg(" Searching for data")
s1_slc.search(base_url="https://scihub.copernicus.eu/dhus/")
# s1_slc.inventory_file = s1_slc.inventory_dir.joinpath('full.inventory.gpkg')
# s1_slc.read_inventory()
for i, track in enumerate(s1_slc.inventory.relativeorbit.unique()):
# filter by track
df = s1_slc.inventory[s1_slc.inventory.relativeorbit == track].copy()
# get all acquisitions dates for that track
datelist = sorted(
[dt.strptime(date, "%Y%m%d") for date in df.acquisitiondate.unique()]
)
# get difference in dates
date_diff = [
int(str(date - event_date).split(" ")[0].split(":")[0]) for date in datelist
]
# get only the negative ones (i.e. before event)
image_days = sorted([int(d) for d in date_diff if int(d) < 0])[-2:]
# continue if not
if len(image_days) != 2:
output.add_live_msg(
f" Not enough pre-event images available for track {track}", "warning"
)
time.sleep(2)
continue
output.add_live_msg(f" Including track {track} for processing")
# get only positives one (ie. after event)
#### we ignore images at the same day? #### or do we include, i.e. >= 0
image_days.append(sorted([int(d) for d in date_diff if int(d) > 0])[0])
print(image_days)
if len(image_days) != 3:
output.add_live_msg(
"""
                Not all imagery is yet available. <br/>
                Processing the pre-event images for now. <br/>
Continue processing after new imagery is available
"""
)
####################################################
## Add an info when this will be the case ##
####################################################
        idx = [date in image_days for date in date_diff]
        final_dates = [
            dt.strftime(date, "%Y%m%d")
            for date in np.array(datelist)[np.array(idx)]
        ]
# if i == 0:
final_df = s1_slc.inventory[
(s1_slc.inventory.acquisitiondate.isin(final_dates))
& (s1_slc.inventory.relativeorbit == track)
]
# else:
# final_df = final_df.append(
# s1_slc.inventory[
# (s1_slc.inventory.acquisitiondate.isin(final_dates)) &
# (s1_slc.inventory.relativeorbit == track)
# ]
# )
output.add_live_msg(
" Downloading relevant Sentinel-1 SLC scenes ... (this may take a while)"
)
try:
s1_slc.download(
final_df,
mirror=2,
concurrent=10,
uname=s1_slc.asf_uname,
pword=s1_slc.asf_pword,
)
    except Exception:
        # fall back to a direct scihub batch download if the ASF mirror fails
scihub.batch_download(
final_df,
s1_slc.download_dir,
s1_slc.scihub_uname,
s1_slc.scihub_pword,
concurrent=2,
base_url="https://scihub.copernicus.eu/dhus",
)
output.add_live_msg(" Create burst inventory")
s1_slc.create_burst_inventory(final_df)
# setting ARD parameters
output.add_live_msg(" Setting processing parameters")
s1_slc.ard_parameters["single_ARD"]["resolution"] = 30 # in metres
s1_slc.ard_parameters["single_ARD"]["create_ls_mask"] = False
s1_slc.ard_parameters["single_ARD"]["backscatter"] = False
s1_slc.ard_parameters["single_ARD"]["coherence"] = True
s1_slc.ard_parameters["single_ARD"]["coherence_bands"] = "VV" # 'VV, VH'
# production of polarimetric layers
s1_slc.ard_parameters["single_ARD"][
"H-A-Alpha"
] = False # does not give a lot of additional information
# resampling of image (not so important)
s1_slc.ard_parameters["single_ARD"]["dem"][
"image_resampling"
] = "BICUBIC_INTERPOLATION" # 'BILINEAR_INTERPOLATION'
# multi-temporal speckle filtering is quite effective
s1_slc.ard_parameters["time-series_ARD"]["mt_speckle_filter"][
"filter"
] = "Boxcar"
s1_slc.ard_parameters["time-series_ARD"]["remove_mt_speckle"] = True
s1_slc.ard_parameters["mosaic"]["cut_to_aoi"] = True
        workers = min(4, os.cpu_count() // 4)
output.add_live_msg(f" We process {workers} bursts in parallel.")
s1_slc.config_dict["max_workers"] = workers
s1_slc.config_dict["executor_type"] = "concurrent_processes"
# process
output.add_live_msg("Processing... (this may take a while)")
s1_slc.bursts_to_ards(
timeseries=True, timescan=False, mosaic=False, overwrite=False
)
if len(image_days) != 3:
raise Exception("Something went wrong")
else:
output.add_live_msg("calculate change and merge results")
bursts = list(s1_slc.processing_dir.glob(f"[A,D]*{track}*"))
# we create the CCD for each burst
for burst in bursts:
track_name = burst.name[:4]
try:
coh_1 = list(burst.glob("Timeseries/01.*coh.VV.tif"))[0]
coh_2 = list(burst.glob("Timeseries/02.*coh.VV.tif"))[0]
dates = sorted(
[
coh_1.name.split(".")[1],
coh_1.name.split(".")[2],
coh_2.name.split(".")[2],
]
)
dst_file = burst.joinpath(
f"Timeseries/ccd_{burst.name}_{'_'.join(dates)}.tif"
)
with rio.open(coh_1) as pre_coh:
pre_arr = pre_coh.read()
meta = pre_coh.meta
meta.update(dtype="uint8", nodata=0)
with rio.open(coh_2) as post_coh:
post_arr = post_coh.read()
coh_diff = np.subtract(pre_arr, post_arr)
coh_diff[coh_diff < 0.27] = 0
coh_diff = coh_diff * 100
with rio.open(dst_file, "w", **meta) as dst:
dst.write(coh_diff.astype("uint8"))
                except Exception:
                    # skip bursts whose coherence pair is missing or unreadable
                    pass
# -----------------------------------------
# and merge the result
src_files_to_mosaic = []
for file in s1_slc.processing_dir.glob(
f"*[A,D]*{track}_*/Timeseries/ccd*tif"
):
src = rio.open(file)
src_files_to_mosaic.append(src)
mosaic, out_trans = merge(src_files_to_mosaic)
out_meta = src.profile.copy()
# Update the metadata
out_meta.update(
driver="GTiff",
height=mosaic.shape[1],
width=mosaic.shape[2],
transform=out_trans,
crs=src.crs,
tiled=True,
blockxsize=128,
blockysize=128,
compress="lzw",
)
tmp_dir = Path(s1_slc.config_dict["temp_dir"])
tmp_mrg = tmp_dir.joinpath(f"ccd_{track_name}_{'_'.join(dates)}.tif")
with rio.open(tmp_mrg, "w", **out_meta) as dest:
dest.write(mosaic)
# crop to aoi (some ost routine)
shapes_ = [row.geometry for _, row in aoi_model.gdf.iterrows()]
with rio.open(tmp_mrg) as src:
out_image, out_transform = rio.mask.mask(src, shapes_, crop=True)
out_meta = src.profile
out_meta.update(
{
"driver": "GTiff",
"height": out_image.shape[1],
"width": out_image.shape[2],
"transform": out_transform,
}
)
# create final output directory
            dpm_out_dir = project_dir / "Damage_Proxy_Maps"
dpm_out_dir.mkdir(parents=True, exist_ok=True)
out_ds_tif = dpm_out_dir / f"ccd_{track_name}_{'_'.join(dates)}.tif"
with rio.open(out_ds_tif, "w", **out_meta) as dest:
dest.write(out_image)
# delete tmpmerge
tmp_mrg.unlink()
# -----------------------------------------
# -----------------------------------------
# kmz and dmp output
# write a color file to tmp
ctfile = tmp_dir.joinpath("colourtable.txt")
            ct = (
                "0 0 0 0 0\n"
                "27 253 246 50 255\n"
                "35 253 169 50 255\n"
                "43 253 100 50 255\n"
                "51 253 50 50 255\n"
                "59 255 10 10 255\n"
                "255 253 0 0 255"
            )
            with open(ctfile, "w") as f:
                f.write(ct)
out_dpm_tif = dpm_out_dir / f"dpm_{track_name}_{'_'.join(dates)}.tif"
demopts = gdal.DEMProcessingOptions(
colorFilename=str(ctfile), addAlpha=True
)
gdal.DEMProcessing(
str(out_dpm_tif), str(out_ds_tif), "color-relief", options=demopts
)
opts = gdal.TranslateOptions(
format="KMLSUPEROVERLAY", creationOptions=["format=png"]
)
gdal.Translate(
str(out_dpm_tif.with_suffix(".kmz")), str(out_dpm_tif), options=opts
)
### adding legend like this to KMZ
# added = [
# "\t\t<ScreenOverlay>\n",
# "\t\t\t<name>\n",
# "Legend: Damage Proxy Map\n",
# "\t\t\t</name>\n",
# "\t\t\t<Icon>\n",
# "\t\t\t\t<href>https://raw.githubusercontent.com/12rambau/damage_proxy_map/refactoring/component/message/legend.png</href>\n",
# "\t\t\t</Icon>\n",
# '\t\t\t<overlayXY x="0.98" y="0.14" xunits="fraction" yunits="fraction"/>\n',
# '\t\t\t<screenXY x="0.98" y="0.14" xunits="fraction" yunits="fraction"/>\n',
# '\t\t\t<rotationXY x="0.5" y="0.5" xunits="fraction" yunits="fraction"/>\n',
# '\t\t\t<size x="0.1" y="0.18" xunits="fraction" yunits="fraction"/>\n',
# "\t\t</ScreenOverlay>\n",
# "\t</Document>\n",
# "</kml>\n"
# ]
# tmpzip = tmp_dir.joinpath('zipped')
# tmpzip.mkdir(parents=True, exist_ok=True)
#
# with ZipFile(out_dmp_tif.with_suffix('.kmz')) as zip_ref:
# zip_ref.extractall(tmpzip)
# with open(tmpzip.joinpath('doc.kml')) as f:
#
# lines = f.readlines()
# lines = lines[:-2]
# lines.extend(added)
#
# with open(tmpzip.joinpath('doc.kml'), 'w') as f:
# for ele in lines:
# f.write(ele)
#
# with ZipFile(out_dmp_tif.with_suffix('.kmz'), 'w') as zip_ref:
# # Iterate over all the files in directory
# for folderName, subfolders, filenames in os.walk(tmpzip):
# for filename in filenames:
# #create complete filepath of file in directory
# filePath = os.path.join(folderName, filename)
# # Add file to zip
# zip_ref.write(filePath, os.path.join('/0/0/', os.path.basename(filePath)))
# # -----------------------------------------
# -----------------------------------------
# polygonize (to points)
with rio.open(out_ds_tif) as src:
image = src.read()
mask = image != 0
geoms = [
{"properties": {"raster_val": v}, "geometry": s}
for i, (s, v) in enumerate(
shapes(image, mask=mask, transform=src.transform)
)
]
# geoms = list(results)
gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms)
gpd_polygonized_raster["geometry"] = gpd_polygonized_raster[
"geometry"
].centroid
gpd_polygonized_raster.to_file(
out_dpm_tif.with_suffix(".geojson"), driver="GeoJSON"
)
# remove storage intense files
try:
[file.unlink() for file in Path(s1_slc.download_dir).glob("**/*zip")]
[
file.unlink()
for file in Path(s1_slc.download_dir).glob("**/*downloaded")
]
[file.unlink() for file in Path(s1_slc.processing_dir).glob("**/*img")]
[file.unlink() for file in Path(s1_slc.processing_dir).glob("**/*tif")]
[
file.unlink()
for file in Path(s1_slc.processing_dir).glob("**/*processed")
]
            except Exception:
                pass
# -----------------------------------------
try:
shutil.rmtree(s1_slc.download_dir)
shutil.rmtree(s1_slc.processing_dir)
    except Exception:
        pass
return
``` |
{
"source": "12rambau/FCDM",
"score": 2
} |
#### File: component/parameter/viz_params.py
```python
def viz_forest_mask(key):
mask = {
"roadless": {
"min": 1,
"max": 15,
"palette": [
"#005000", # val 1. Evergreen forest
"#336333", # val 2. Evergreen forest within the plantation area
"#9b503c", # val 3. NEW degradation
"#87732d", # val 4. Ongoing degradation (disturbances still detected)
"#648723", # val 5. Degraded forest (former degradation, no disturbances detected anymore)
"#ff1400", # val 6. NEW deforestation (may follow degradation)
"#ffff9b", # val 7. Ongoing deforestation (disturbances still detected)
"#98e600", # val 8. NEW Regrowth
"#32a000", # val 9. Regrowthing
"#ffffff", # val 10. Other land cover (not water)
"#004da8", # val 11. Permanent Water (pekel et al.2015)
"#009dc8", # val 12. Seasonal Water (pekel et al.2015)
"#005000", # val 13. Not enough data at the beginning of the archive (before StartYear but forest)
"#005000", # val 14. No data for this specific year (after StartYear but forest)
"#ffffff", # val 15. Not enough data at the beginning of the archive but other lc
],
},
"gfc": {"min": 0, "max": 1, "palette": ["#ffffcc", "#006600"]},
"no_map": {},
}
    if key not in mask:
key = "gfc"
return mask[key]
legend_dict = {"forest mask": "#006600", "change": "#ce0f0f", "no change": "#d3d3d3"}
```
#### File: component/scripts/process_scripts.py
```python
from functools import partial
import ee
ee.Initialize()
from component import parameter as cp
def check_forest_mask(asset, ee_aoi):
"""check that the given Image asset is a valid mask with values between 0 and 1"""
# exit on predefined ones
if asset in [v["value"] for v in cp.forest_map]:
return
image = ee.Image(asset)
# comupte a reducer
reduction = image.reduceRegion(
reducer=ee.Reducer.frequencyHistogram(),
geometry=ee_aoi.geometry(),
bestEffort=True,
)
# Remove all the unnecessary reducer output structure and make a list of values.
values = (
ee.Dictionary(reduction.get(image.bandNames().get(0)))
.keys()
.map(ee.Number.parse)
.getInfo()
)
print(values)
# raise an exception if values are out of 0 1
if not all(v in [0, 1] for v in values):
raise Exception(
"To be used as a forest mask, the selected asset need to be a binary Image with only 0 and 1 values"
)
return
def join_landsat_collections(coll1, coll2):
"""Joining of SR and TOA collections in order to make combined use of pixel_qa band and simple_cloud_score algorithm (Thanks to <NAME>)"""
eqfilter = ee.Filter.equals(rightField="system:index", leftField="system:index")
join = ee.ImageCollection(ee.Join.inner().apply(coll1, coll2, eqfilter))
# Inner join returns a FeatureCollection with a primary and secondary set of properties.
    # Properties are collapsed into different bands of an image.
joined = join.map(lambda el: ee.Image.cat(el.get("primary"), el.get("secondary")))
return joined.sort("system:time_start")
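# Usage sketch: joined = join_landsat_collections(sr_coll, toa_coll) pairs each
# SR scene with its TOA counterpart through 'system:index', so the SR pixel_qa
# band and the TOA simpleCloudScore 'cloud' band can be used together downstream.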
def IFORCE_PINO_step2(image, medianImage, apply_buffer, cloud_buffer):
"""
Masking Step S2_1 for Level-1C: Masking for clouds and cloud shadows (Sentinel-2)
S2 adapted version of single date classification proposed in http://publications.jrc.ec.europa.eu/repository/handle/JRC95065
Copyright: <NAME> (December 2018; <EMAIL>)
"""
    # this function is only applied to the sentinel 2 sensor, so the sensor
    # argument can safely be dropped and all the useful bands fetched here
bands = cp.sensors["sentinel 2"]["bands"]
qa60 = image.select(bands["qa60"])
blue = image.select(bands["blue"])
aerosol = image.select(bands["aerosol"])
water_vapor = image.select(bands["water_vapor"])
green = image.select(bands["green"])
red = image.select(bands["red"])
red_edge_3 = image.select(bands["red_edge_3"])
red_edge_4 = image.select(bands["red_edge_4"])
swir1 = image.select(bands["swir1"])
red_edge_2 = image.select(bands["red_edge_2"])
qa60 = image.select(bands["qa60"])
esa_mask = qa60.eq(2048).And(blue.gt(0.12)).And(aerosol.gt(1800))
cloud_mask = (
aerosol.gt(2000)
.Or(aerosol.gt(1340).And(water_vapor.gt(300)))
.Or(aerosol.gt(1750).And(water_vapor.gt(230)))
.Or(esa_mask)
)
growing111 = (
blue.lte(green.add(blue.multiply(0.05)))
.And(green.lte(red.add(green.multiply(0.05))))
.And(red.lte(red_edge_3.add(red.multiply(0.05))))
.And(red_edge_3.lte(red_edge_4.add(red_edge_3.multiply(0.05))))
.And(red_edge_4.lte(swir1.add(red_edge_4.multiply(0.05))))
.And(aerosol.lt(1500))
)
    # chained .lte() calls compare the boolean result, not the band values;
    # test each consecutive pair explicitly to get a monotonically increasing spectrum
    growing28 = (
        blue.lte(green)
        .And(green.lte(red))
        .And(red.lte(red_edge_2))
        .And(red_edge_2.lte(red_edge_3))
        .And(red_edge_3.lte(red_edge_4))
        .And(swir1.gte(red_edge_2))
        .And(aerosol.lt(1500))
    )
aerosol_mask = aerosol.gt(1350).And(water_vapor.gt(400)).Or(aerosol.gt(2000))
blue_red_swir1 = [bands["blue"], bands["red"], bands["swir1"]]
spdist = image.select(blue_red_swir1).spectralDistance(
medianImage.select(blue_red_swir1)
)
mask1C_blue = blue.subtract(medianImage.select(bands["blue"])).divide(blue)
mask1C_red = red.subtract(medianImage.select(bands["red"])).divide(red)
mask1C_swir1 = swir1.subtract(medianImage.select(bands["swir1"])).divide(swir1)
mask1C_blue = mask1C_blue.gt(0.15).And(blue.gt(1300)).And(aerosol_mask)
mask1C_red = mask1C_red.gt(0.2).And(aerosol_mask)
mask1C_swir1 = mask1C_swir1.lt(-0.68).And(
spdist.gt(0.18)
) # remove small shadow pixels
# mask1C_swir1 takes lots of water (if changes) - distance is more robust and confirm both
final_mask = mask1C_red.multiply(2).add(mask1C_swir1)
# keny set to -0.65 or -0.67
# remove change from forest to soil using RED band < 1700
final_mask_mod = final_mask.where(final_mask.eq(2).And(green.lt(1000)), 0).where(
mask1C_blue, 3
)
if apply_buffer:
final_mask_mod = final_mask_mod.gt(0).focal_max(
cloud_buffer, "circle", "meters", 1
)
final_mask_mod = final_mask_mod.Or(cloud_mask).where(growing111.Or(growing28), 0)
return image.updateMask(final_mask_mod.eq(0))
def IFORCE_PINO_step1(image, apply_buffer, cloud_buffer):
"""
Single Date Classification ONLY MAIN CLASSED + WATER
Copyright: <NAME> (December 2018; <EMAIL>)
"""
    # unused parameters were removed from the function signature
    # this function is only applied to the sentinel 2 sensor, so the sensor
    # argument can safely be dropped and all the useful bands fetched here
bands = cp.sensors["sentinel 2"]["bands"]
blue = image.select(bands["blue"])
green = image.select(bands["green"])
red = image.select(bands["red"])
    red_edge_3 = image.select(bands["red_edge_3"])
red_edge_4 = image.select(bands["red_edge_4"])
swir1 = image.select(bands["swir1"])
aerosol = image.select(bands["aerosol"])
red_edge_2 = image.select(bands["red_edge_2"])
qa60 = image.select(bands["qa60"])
water_vapor = image.select(bands["water_vapor"])
growing111 = (
blue.lte(green.add(blue.multiply(0.05)))
.And(green.lte(red.add(green.multiply(0.05))))
.And(red.lte(red_edge_3.add(red.multiply(0.05))))
.And(red_edge_3.lte(red_edge_4.add(red_edge_3.multiply(0.05))))
.And(red_edge_4.lte(swir1.add(red_edge_4.multiply(0.05))))
.And(swir1.lt(1500))
)
    # chained .lte() calls compare the boolean result, not the band values;
    # test each consecutive pair explicitly to get a monotonically increasing spectrum
    growing28 = (
        blue.lte(green)
        .And(green.lte(red))
        .And(red.lte(red_edge_2))
        .And(red_edge_2.lte(red_edge_3))
        .And(red_edge_3.lte(red_edge_4))
        .And(swir1.gte(red_edge_2))
        .And(aerosol.lt(1500))
    )
esa_mask = qa60.eq(2048).And(blue.gt(0.12)).And(aerosol.gt(1800))
cloud_mask = (
aerosol.gt(2000)
.Or(aerosol.gt(1340).And(water_vapor.gt(300)))
.Or(aerosol.gt(1750).And(water_vapor.gt(230)))
.Or(esa_mask)
)
if apply_buffer:
cloud_mask = cloud_mask.focal_max(cloud_buffer, "circle", "meters", 1)
cloud_mask = cloud_mask.where(growing111.Or(growing28), 0)
return image.updateMask(cloud_mask.eq(0))
def masking_1QB(image, cloud_buffer, sensor):
"""Masking options for clouds (Landsat 8)"""
    # this function is only adapted to landsat 8;
    # sensor is kept as a parameter to match the other masking function prototypes
bands = cp.sensors[sensor]["bands"]
nir = image.select(bands["nir"])
swir2 = image.select(bands["swir2"])
pixel_qa = image.select(bands["pixel_qa"])
cloud = image.select(bands["cloud"]) # build by the simple_cloud_score
bright_temp1 = image.select(bands["bright_temp1"])
# start the filtering
no_cloud_mask = nir.eq(0).And(swir2.eq(0))
cloud_pixel_qa = pixel_qa.bitwiseAnd(32).neq(0).And(cloud.gt(20))
cloud_shadow_pixel_qa = pixel_qa.bitwiseAnd(8).neq(0)
cloud_conf_qa = (
pixel_qa.bitwiseAnd(64)
.add(pixel_qa.bitwiseAnd(128))
.interpolate([0, 64, 128, 192], [0, 0, 1, 1], "clamp")
.int()
.And(cloud.gt(20))
)
cirrus_conf_qa = (
pixel_qa.bitwiseAnd(256)
.add(pixel_qa.bitwiseAnd(512))
.interpolate([0, 256, 512, 768], [0, 0, 1, 1], "clamp")
.int()
.And(cloud.gt(20))
)
simple_cloud_score = cloud.gte(13)
unsure_clouds = cloud.lt(13).And(cloud.gte(9)).And(bright_temp1.lte(292))
# aggregate all to build the mask
masked_cloud = (
no_cloud_mask.Or(cloud_pixel_qa)
.Or(cloud_shadow_pixel_qa)
.Or(cloud_conf_qa)
.Or(cirrus_conf_qa)
.Or(simple_cloud_score)
.Or(unsure_clouds)
)
if cloud_buffer:
masked_cloud = masked_cloud.focal_max(cloud_buffer, "circle", "meters", 1)
return image.updateMask(masked_cloud.add(1).unmask(0).eq(1))
def masking_S_1(image, cloud_buffer, sensor):
"""Masking Step S2_1 for Level-2A: Masking options for clouds (Sentinel-2) (still will be worked on)"""
    # this function is only adapted to sentinel 2;
    # sensor is kept as a parameter to match the other masking function prototypes
scl = image.select(cp.sensors[sensor]["bands"]["scl"])
S2A_clouds = scl.eq(7).Or(scl.eq(8)).Or(scl.eq(9)).Or(scl.eq(10))
S2A_shadows = scl.eq(3)
S2A_water = scl.eq(6)
S2A_masked = S2A_clouds.Or(S2A_shadows).Or(S2A_water)
if cloud_buffer:
S2A_masked = S2A_masked.focal_max(cloud_buffer, "circle", "meters", 1)
return image.updateMask(S2A_masked.add(1).unmask(255).eq(1))
def masking_L_1(image, cloud_buffer, sensor):
"""Masking Step 1: Masking options for clouds (any Landsat sensor)"""
bands = cp.sensors[sensor]["bands"]
pixel_qa = image.select(bands["pixel_qa"])
cloud = image.select("cloud") # from the simplecloud algorithm
nir = image.select(bands["nir"])
swir2 = image.select(bands["swir2"])
bright_temp1 = image.select(bands["bright_temp1"])
no_cloud_mask = nir.eq(0).And(swir2.eq(0))
cloud_pixel_qa = pixel_qa.bitwiseAnd(32).neq(0)
cloud_shadow_pixel_qa = pixel_qa.bitwiseAnd(8).neq(0)
cloud_conf_qa = (
pixel_qa.bitwiseAnd(64)
.add(pixel_qa.bitwiseAnd(128))
.interpolate([0, 64, 128, 192], [0, 0, 1, 1], "clamp")
.int()
)
cloud_shadow_sr_cloud_qa = (
image.select("sr_cloud_qa").bitwiseAnd(4).neq(0)
    )  # need to investigate where the 'sr_cloud_qa' band comes from
simple_cloud_score = cloud.gte(13)
unsure_clouds = cloud.lt(13).And(cloud.gte(9)).And(bright_temp1.lte(292))
masked_clouds = (
no_cloud_mask.Or(cloud_pixel_qa)
.Or(cloud_shadow_pixel_qa)
.Or(cloud_conf_qa)
.Or(cloud_shadow_sr_cloud_qa)
.Or(simple_cloud_score)
.Or(unsure_clouds)
)
if cloud_buffer:
masked_clouds = masked_clouds.focal_max(cloud_buffer, "circle", "meters", 1)
return image.updateMask(masked_clouds.add(1).unmask(0).eq(1))
masking_1 = {
"landsat 4": masking_L_1,
"landsat 5": masking_L_1,
"landsat 7": masking_L_1,
"landsat 8": masking_1QB,
"sentinel 2": masking_S_1,
}
def ddr_filter(nbr_diff, threshold, radius, nb_disturbances):
"""
    clean the final result using a disturbance-density-related (DDR) filtering:
    if a given number of disturbance events is not reached within the moving kernel, the pixel value is masked (set to 0)
"""
# create a mask with the event threshold
nbr_diff_threshold = nbr_diff.where(nbr_diff.lt(threshold), 0).And(
(nbr_diff.where(nbr_diff.gte(threshold), 1))
)
    # count the number of events in the kernel according to the thresholded mask
    nbr_nb_events = nbr_diff_threshold.reduceNeighborhood(
reducer=ee.Reducer.sum().unweighted(), kernel=ee.Kernel.circle(radius, "meters")
)
# mask pixel where there are not enough events in the kernel
nbr_nb_events_mask = (
nbr_diff.where(nbr_nb_events.gte(nb_disturbances), 1)
.And((nbr_diff.where(nbr_nb_events.lt(nb_disturbances), 0)))
.unmask(-2)
)
# get back the real values where necessary
nbr_diff_ddr = (
nbr_nb_events_mask.multiply(nbr_diff).unmask(-2).updateMask(nbr_diff.mask())
)
return nbr_diff_ddr
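# Usage sketch (parameter values are illustrative, not from the source):
# cleaned = ddr_filter(nbr_diff, threshold=0.015, radius=500, nb_disturbances=5)
# keeps a pixel's disturbance value only where at least 5 above-threshold events
# fall within a 500 m circle around it.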
def masking_2(image, forest_mask, year, forest_map, sensor):
"""Masking Step 2: Masking of sensor errors and non-forest areas"""
bands = cp.sensors[sensor]["bands"]
nir = image.select(bands["nir"])
swir2 = image.select(bands["swir2"])
blue = image.select(bands["blue"])
green = image.select(bands["green"])
red = image.select(bands["red"])
swir1 = image.select(bands["swir1"])
sensor_error = (
nir.lte(0)
.Or(swir2.lte(0))
.Or(blue.lte(0))
.Or(green.lte(0))
.Or(red.lte(0))
.Or(swir1.lte(0))
.add(1)
.unmask(0)
)
sensor_error_buffer = sensor_error.focal_min(
radius=50, kernelType="circle", units="meters", iterations=1
)
image = image.unmask(0)
out = {
"no_map": image.updateMask(sensor_error_buffer.eq(1).And(forest_mask.eq(1))),
"roadless": image.updateMask(sensor_error_buffer.eq(1)).updateMask(
forest_mask.select(f"Dec{year + 1}")
.eq(1)
.Or(forest_mask.select(f"Dec{year + 1}").eq(2))
.Or(forest_mask.select(f"Dec{year + 1}").eq(13))
.Or(forest_mask.select(f"Dec{year + 1}").eq(14))
),
"gfc": image.updateMask(sensor_error_buffer.eq(1)).updateMask(forest_mask),
}
if forest_map not in out.keys():
forest_map = "gfc"
return out[forest_map]
def compute_nbr(image, sensor):
"""
Compute nbr index
NBR = (NIR-SWIR2)/(NIR+SWIR2)
"""
bands = cp.sensors[sensor]["bands"]
nir = image.select(bands["nir"])
swir2 = image.select(bands["swir2"])
doy = ee.Algorithms.Date(ee.Number(image.get("system:time_start")))
yearday = ee.Number(doy.get("year")).add(
ee.Number.parse(doy.format("D")).divide(365)
)
# create an image out of the yearday value
yearday = ee.Image.constant(yearday).float().rename("yearday")
nbr = nir.subtract(swir2).divide(nir.add(swir2)).rename("NBR")
return nbr.addBands(yearday)
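# Worked example: per pixel, NIR = 0.5 and SWIR2 = 0.2 give
# NBR = (0.5 - 0.2) / (0.5 + 0.2) ~ 0.43, while the 'yearday' band stores the
# acquisition date as a fractional year (e.g. 2019 + 32/365 for Feb 1st, 2019).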
def adjustment_kernel(image, kernel_size):
"""
Adjustment kernel function, which self-references each NBR input scene
(in order to allow inter-scene comparability)
"""
nbr = image.select("NBR")
yearday = image.select("yearday")
return nbr.subtract(nbr.focal_median(kernel_size, "circle", "meters")).addBands(
yearday
)
def capping(image):
"""Capping at 0 and -1 (positive values are set to 0; values <= -1 are set to -1 because the latter mainly refer to active fires)"""
nbr = image.select("NBR")
yearday = image.select("yearday")
return nbr.where(nbr.gt(0), 0).where(nbr.lt(-1), -1).multiply(-1).addBands(yearday)
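# Worked example: NBR values 0.2, -0.4 and -1.3 map to 0, 0.4 and 1 respectively
# (positives are zeroed, values below -1 are capped, then the sign is flipped).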
# getting the forest_mask var
def get_forest_mask(forest_map, year, treecover, aoi):
"""return the forest mask corresponding to the forest_map input"""
hansen = ee.Image(cp.hansen_gfc).clip(aoi)
if forest_map == "no_map":
forest_mask = hansen.select("treecover2000").gte(0)
forest_mask_display = forest_mask.updateMask(forest_mask)
elif forest_map == "roadless":
forest_mask = ee.ImageCollection(cp.roadless).mosaic().byte().clip(aoi)
forest_mask_display = forest_mask.updateMask(forest_mask).select(f"Dec{year+1}")
elif forest_map == "gfc":
basemap2000 = hansen.unmask(0).select("treecover2000").gte(treecover)
loss_year = hansen.unmask(0).select("lossyear")
change = loss_year.lte(year - 2000).And(loss_year.gt(0)).bitwise_not()
forest_mask = basemap2000.multiply(change)
forest_mask_display = forest_mask.select("treecover2000").mask(
forest_mask
) # .select(f'treecover2000')
else:
forest_mask = ee.Image(forest_map).select(0)
forest_mask_display = forest_mask.updateMask(forest_mask)
return (forest_mask, forest_mask_display)
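# Usage sketch (aoi stands for any ee.Geometry or ee.FeatureCollection):
# mask, display = get_forest_mask('gfc', 2015, 30, aoi)
# returns a binary mask of pixels with >= 30% tree cover in 2000 that were not
# flagged as loss between 2001 and 2015.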
def get_collection(
sensor, start, end, forest_map, year, forest_mask, cloud_buffer, aoi
):
# create the image collection
sr_collection = (
ee.ImageCollection(cp.sensors[sensor]["dataset"]["sr"])
.filterDate(start, end)
.filterBounds(aoi)
)
sr_toa_collection = sr_collection
if "landsat" in sensor:
# create the cloud ImageCollection
toa_collection = (
ee.ImageCollection(cp.sensors[sensor]["dataset"]["toa"])
.filterDate(start, end)
.filterBounds(aoi)
.map(ee.Algorithms.Landsat.simpleCloudScore)
.select("cloud")
)
sr_toa_collection = join_landsat_collections(sr_collection, toa_collection)
# masking of sensor errors and non-forest areas
sr_toa_masked_collection = sr_toa_collection.map(
partial(
masking_2,
forest_mask=forest_mask,
year=year,
forest_map=forest_map,
sensor=sensor,
)
)
# cloud masking
sr_toa_masked_collection = sr_toa_masked_collection.map(
partial(masking_1[sensor], sensor=sensor, cloud_buffer=cloud_buffer)
)
return sr_toa_masked_collection
```
#### File: component/tile/basemap_tile.py
```python
from sepal_ui import sepalwidgets as sw
import ipyvuetify as v
from component import parameter as cp
from component.message import cm
from component import widget as cw
class BasemapTile(sw.Tile):
def __init__(self, model):
# no need to gather the io object as attribute as there are no custom methods
# create the widgets
self.forest_map = cw.CustomAssetSelect(
label=cm.input_lbl.forest_map, v_model=model.forest_map, types=["IMAGE"]
)
self.forest_map.default_asset = cp.forest_map
# self.forest_map = v.Select(label=cm.input_lbl.forest_map, items=cp.forest_map, v_model=model.forest_map)
self.year = v.Slider(
class_="mt-5",
label=cm.input_lbl.forest_map_year,
min=cp.forest_map_min_year,
max=cp.forest_map_max_year,
v_model=model.forest_map_year,
thumb_label="always",
)
self.tree_cover = v.Slider(
class_="mt-5",
label=cm.input_lbl.treecover,
v_model=model.treecover,
thumb_label="always",
)
# bind the inputs to the io through an alert
model.bind(self.forest_map, "forest_map").bind(
self.year, "forest_map_year"
).bind(self.tree_cover, "treecover")
# create the tile
super().__init__(
"nested_widget",
cm.tile.basemap,
inputs=[self.forest_map, self.year, self.tree_cover],
)
# js behavior
self.forest_map.observe(self._update_status, "v_model")
model.observe(self._select_year, "reference_start")
def _update_status(self, change):
"""disable the hansen params if no forest mask is selected"""
        # read the value
        # distinguish between the preselected maps and user assets
        value = change["new"]["value"] if isinstance(change["new"], dict) else change["new"]
date = value in ["gfc", "roadless"]
treecover = value == "gfc"
self.year.disabled = not date
self.tree_cover.disabled = not treecover
return self
def _select_year(self, change):
year = int(change["new"][:4])
self.year.v_model = min(
max(cp.forest_map_min_year, year), cp.forest_map_max_year
)
```
#### File: component/tile/fcdm_tile.py
```python
from sepal_ui import sepalwidgets as sw
import ipyvuetify as v
from component import parameter as cp
from component.message import cm
class FcdmTile(sw.Tile):
def __init__(self, model):
# create inputs
radius_title = v.Html(tag="h4", class_="mt-5", children=[cm.input_lbl.self_ref])
radius = v.Slider(
class_="mt-5",
label=cm.input_lbl.kernel_radius,
max=cp.max_kernel_radius,
step=10,
v_model=model.kernel_radius,
thumb_label="always",
)
ddr_title = v.Html(tag="h4", children=[cm.input_lbl.ddr])
threshold = v.Slider(
class_="mt-5",
label=cm.input_lbl.filter_threshold,
v_model=model.filter_threshod,
step=0.001,
max=0.1,
thumb_label="always",
)
filtering_radius = v.Slider(
class_="mt-5",
label=cm.input_lbl.filter_radius,
min=cp.min_radius_filtering_kernel,
max=cp.max_radius_filtering_kernel,
v_model=model.filter_radius,
step=10,
thumb_label="always",
)
cleaning = v.Slider(
class_="mt-5",
label=cm.input_lbl.disturbance_event,
max=cp.max_disturbing_event_per_kernel,
v_model=model.cleaning_offset,
thumb_label="always",
)
# bind to the io object
model.bind(radius, "kernel_radius").bind(threshold, "filter_threshod").bind(
filtering_radius, "filter_radius"
).bind(cleaning, "cleaning_offset")
super().__init__(
"nested_widget",
cm.tile.fcdm,
inputs=[
radius_title,
radius,
ddr_title,
threshold,
filtering_radius,
cleaning,
],
)
```
#### File: component/tile/result_tile.py
```python
from sepal_ui import sepalwidgets as sw
from sepal_ui import mapping as sm
from ipyleaflet import WidgetControl
from component.message import cm
from component import widget as cw
from component import parameter as cp
class ResultTile(sw.Tile):
def __init__(self):
# create a save widget
self.save = cw.ExportMap()
# create the map
self.m = cw.CustomMap()
        # after this zoom level GEE crashes and refuses to display images
        self.m.max_zoom = 14
# add a legend to the map
self.m.add_legend(legend_title="Legend", legend_dict=cp.legend_dict)
# add the export control
self.m.add_control(WidgetControl(widget=self.save, position="topleft"))
# create the tile
super().__init__("result_tile", cm.tile.result, inputs=[self.m])
```
#### File: component/tile/sensor_tile.py
```python
from datetime import datetime as dt
from sepal_ui import sepalwidgets as sw
import ipyvuetify as v
from component import parameter as cp
from component.message import cm
class SensorTile(sw.Tile):
def __init__(self, model):
# create adjustable variables end and start
self.end = dt.now().year
        self.start = 1950  # prior to any satellite launch
# create the widgets
self.sensors_select = v.Select(
label=cm.input_lbl.sensor,
items=[],
v_model=[],
multiple=True,
chips=True,
deletable_chips=True,
)
landsat_7_switch = v.Switch(
label=cm.input_lbl.do_threshold, v_model=model.improve_L7
)
landsat_7_slider = v.Slider(
class_="mt-5",
label=cm.input_lbl.threshold,
min=0,
max=0.3,
step=0.001,
v_model=model.improve_threshold,
thumb_label="always",
)
cloud_buffer = v.Slider(
class_="mt-5",
label=cm.input_lbl.cloud_buffer,
min=0,
max=2500,
step=10,
v_model=model.cloud_buffer,
thumb_label="always",
)
# bind them to io
        model.bind(self.sensors_select, "sensors").bind(
            landsat_7_switch, "improve_L7"
        ).bind(landsat_7_slider, "improve_threshold").bind(
            cloud_buffer, "cloud_buffer"
        )
super().__init__(
"nested_widget",
cm.tile.sensor,
inputs=[
self.sensors_select,
landsat_7_switch,
landsat_7_slider,
cloud_buffer,
],
alert=sw.Alert(),
)
# add js behaviour
self.sensors_select.observe(self._check_sensor, "v_model")
model.observe(self._change_start, "reference_start")
model.observe(self._change_end, "analysis_end")
def _check_sensor(self, change):
"""
prevent users from selecting landsat and sentinel 2 sensors
provide a warning message to help understanding
"""
        # exit if it's a removal
if len(change["new"]) < len(change["old"]):
self.alert.reset()
return self
        # use the position in the list as a boolean value
        sensors = ["landsat", "sentinel"]
        # guess the newly added input
        new_value = list(set(change["new"]) - set(change["old"]))[0]
        id_ = next(i for i, s in enumerate(sensors) if s in new_value)
        # reject the selection if the other sensor family is already selected
        if any(sensors[1 - id_] in s for s in change["old"]):
            change["owner"].v_model = [new_value]
            self.alert.add_live_msg(cm.no_mix, "warning")
        else:
            self.alert.reset()
        return self
def _change_end(self, change):
self.end = int(change["new"][:4]) if change["new"] else dt.now().year
self._check_sensor_availability()
return self
def _change_start(self, change):
self.start = int(change["new"][:4]) if change["new"] else 1950
self._check_sensor_availability()
return self
def _check_sensor_availability(self):
"""reduce the number of available satellites based on the dates selected by the user"""
# reset current values
self.sensors_select.items = []
self.sensors_select.v_model = []
# check every satellite availability
years = range(self.start, self.end + 1)
sensors = []
for s in cp.sensors:
if any(e in years for e in [cp.sensors[s]["start"], cp.sensors[s]["end"]]):
sensors.append(s)
elif (
cp.sensors[s]["start"] < self.start and cp.sensors[s]["end"] > self.end
):
sensors.append(s)
self.sensors_select.items = sensors
return self
```
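The two branches of `_check_sensor_availability` together implement a plain interval-overlap test between each sensor's lifetime and the user-selected period. A hedged restatement of that logic, where the `lifetimes` dict is a made-up stand-in for `cp.sensors`:
```python
lifetimes = {"Landsat 5": (1984, 2013), "Landsat 8": (2013, 2030), "Sentinel 2": (2015, 2030)}

def available(start, end):
    # keep a sensor when its lifetime [s0, s1] overlaps the period [start, end]
    return [s for s, (s0, s1) in lifetimes.items() if s0 <= end and s1 >= start]

print(available(1990, 2000))  # ['Landsat 5']
print(available(2016, 2020))  # ['Landsat 8', 'Sentinel 2']
```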
#### File: component/widget/custom_asset_select.py
```python
from sepal_ui import sepalwidgets as sw
from sepal_ui.scripts import utils as su
from component import parameter as cp
class CustomAssetSelect(sw.AssetSelect):
@su.switch("loading")
def _validate(self, change):
super()._validate(change)
        # if the selected asset is one of the default ones, keep it
if change["new"] in cp.forest_map:
self.error_messages = None
self.error = False
return self
``` |
{
"source": "12rambau/forest_at_risk",
"score": 2
} |
#### File: component/scripts/fcc.py
```python
import ee
from component import parameter as cp
def is_tmf_covered(geometry):
"""return true if there is more than 0 images"""
return (
ee.ImageCollection(cp.fcc_sources["TMF"]["asset"])
.filterBounds(geometry)
.size()
.getInfo()
!= 0
)
def get_fcc(source, start, end):
"""retreive the image from GEE based on the selected parameters"""
if source == "TMF":
# JRC annual product (AP)
ap = ee.ImageCollection(cp.fcc_sources[source]["asset"]).mosaic().byte()
# ap_allYear: forest if Y = 1 or 2.
ap_forest = ap.where(ap.eq(2), 1)
ap_all_year = ap_forest.where(ap_forest.neq(1), 0)
        # convert the dates into band numbers
b_final = 2022 - 1990
b_start = start - 1990
b_end = end - 1990
# Forest in start date
ap_start = ap_all_year.select(list(range(b_start, b_final)))
forest_start = ap_start.reduce(ee.Reducer.sum()).gte(1)
forest_mask = forest_start.eq(1)
# Forest in end date
ap_end = ap_all_year.select(list(range(b_end, b_final)))
forest_end = ap_end.reduce(ee.Reducer.sum()).gte(1)
# final deforestation map
        # 0 where there is deforestation, 1 where there is not, nodata where there was no forest
forest = forest_end.subtract(forest_start).mask(forest_mask).add(1)
elif source == "GFC":
        # we define the tree cover threshold at 10%
        perc = 10
# Hansen map
gfc = ee.Image(cp.fcc_sources[source]["asset"])
# Tree cover, loss, and gain
treecover = gfc.select(["treecover2000"])
lossyear = gfc.select(["lossyear"])
# Forest in 2000
        forest2000 = treecover.gte(perc)
forest2000 = forest2000.toByte()
# convert date in deforestation values
v_start = start - 2000
v_end = end - 2000
# Deforestation
loss_start = lossyear.gte(1).And(lossyear.lte(v_start))
        loss_end = lossyear.gte(1).And(lossyear.lte(v_end))
# Forest
forest_start = forest2000.where(loss_start.eq(1), 0)
forest_mask = forest_start.eq(1)
forest_end = forest2000.where(loss_end.eq(1), 0)
# final deforestation map
        # 0 where there is deforestation, 1 where there is not, nodata where there was no forest
forest = forest_end.subtract(forest_start).mask(forest_mask).add(1)
return forest.int8()
``` |
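A hedged usage sketch of the module above, assuming an authenticated Earth Engine session and that `cp.fcc_sources` is configured with the TMF and GFC assets (the import path mirrors the file header):
```python
import ee

ee.Initialize()

from component.scripts.fcc import get_fcc, is_tmf_covered  # assumed import path

# fall back to GFC outside the Tropical Moist Forest coverage
geometry = ee.Geometry.Point([12.5, 41.9]).buffer(10_000)
source = "TMF" if is_tmf_covered(geometry) else "GFC"

# 0 = deforested, 1 = stable forest, masked where there was no forest
forest = get_fcc(source, start=2000, end=2020)
```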
{
"source": "12rambau/pytfa",
"score": 3
} |
#### File: pytfa/io/json.py
```python
import json
import numpy
from .dict import model_from_dict, model_to_dict
class MyEncoder(json.JSONEncoder):
"""
We define an encoder that takes care of the serialization of numpy types,
which are not handled by json by default
"""
def default(self, obj):
if isinstance(obj, numpy.integer):
return int(obj)
elif isinstance(obj, numpy.floating):
return float(obj)
elif isinstance(obj, numpy.ndarray):
return obj.tolist()
else:
return super(MyEncoder, self).default(obj)
def check_json_extension(filepath):
if not filepath.endswith('.json'):
filepath += '.json'
return filepath
def save_json_model(model, filepath):
filepath = check_json_extension(filepath)
obj = model_to_dict(model)
with open(filepath, 'w') as fid:
json.dump(obj, fid, cls=MyEncoder)
def load_json_model(filepath):
filepath = check_json_extension(filepath)
with open(filepath, 'r') as fid:
obj = json.load(fid)
model = model_from_dict(obj)
return model
def json_dumps_model(model):
"""
Returns a JSON dump as a string
:param model:
:return:
"""
obj = model_to_dict(model)
return json.dumps(obj,cls=MyEncoder)
def json_loads_model(s):
"""
Loads a model from a string JSON dump
:param s: JSON string
:return:
"""
obj = json.loads(s)
return model_from_dict(obj)
```
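A short, self-contained illustration of what `MyEncoder` adds over the stock encoder (only the import path from the file header is assumed):
```python
import json

import numpy

from pytfa.io.json import MyEncoder, check_json_extension

# numpy integers and arrays are not JSON-serializable by default
payload = {"count": numpy.int64(3), "bounds": numpy.array([0, 10])}
print(json.dumps(payload, cls=MyEncoder))  # {"count": 3, "bounds": [0, 10]}

# the .json extension is appended when missing
print(check_json_extension("my_model"))  # my_model.json
```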
#### File: pytfa/redgem/network_expansion.py
```python
import networkx as nx
from cobra import Metabolite, Reaction, Model
from copy import deepcopy
class NetworkExpansion:
def __init__(self, gem, core_subsystems, extracellular_system,
cofactors, small_metabolites, inorganics,
d, n):
"""
A class encapsulating the RedGEM algorithm
:param gem: The studied GEM
:param core_subsystems: Core subsystems
:param extracellular_system: Extracellular metabolite ids
:param cofactors: List of cofactors id
:param small_metabolites: List of small metabolites id
:param inorganics: List of inorganics id
        :param d: Maximum path length between core subsystems
        :param n: Maximum path length from the extracellular system
"""
# Shallow copy of the GEM : the deepcopy is possibly performed in redgem, before
# calling NetworkExpansion
self._redgem = gem
#self._redgem.name = 'redgem'
self._graph = nx.DiGraph()
# Subsystems
self._core_subsystems = core_subsystems
self._subsystem_count = len(core_subsystems)
self._extracellular_system = extracellular_system
# Dicts to save extracted reactions and metabolites for each subsystem
# TODO: Improve structure definition
dict_of_lists_of_sets = {}
for name in core_subsystems:
dict_of_lists_of_sets[name] = [set() for _ in range(d+1)]
dict_of_dicts_of_lists_of_sets = {}
for name in core_subsystems:
dict_of_dicts_of_lists_of_sets[name] = deepcopy(dict_of_lists_of_sets)
dict_of_int = {}
for name in core_subsystems:
dict_of_int[name] = -1
dict_of_dicts_of_int = {}
for name in core_subsystems:
dict_of_dicts_of_int[name] = deepcopy(dict_of_int)
self._subsystem_reactions = {}
self._subsystem_reactions_id = {}
self._intermediate_reactions_id = deepcopy(dict_of_dicts_of_lists_of_sets)
self._subsystem_metabolites = {}
self._subsystem_metabolites_id = {}
self._intermediate_metabolites_id = deepcopy(dict_of_dicts_of_lists_of_sets)
self._intermediate_paths = deepcopy(dict_of_dicts_of_lists_of_sets)
self._min_distance_sub_to_sub = deepcopy(dict_of_dicts_of_int)
self._intermediate_extracellular_paths = deepcopy(dict_of_lists_of_sets)
self._intermediate_extracellular_metabolites_id = deepcopy(dict_of_lists_of_sets)
self._intermediate_extracellular_reactions_id = deepcopy(dict_of_lists_of_sets)
self._path_dict = {}
# Save others parameters
self._cofactor_pairs = cofactors
self._small_metabolites = small_metabolites
self._inorganics = inorganics
self._d = d
self._n = n
def extract_subsystem_reactions(self, subsystem):
"""
Extracts all reactions of a subsystem and stores them and their id in the corresponding
dictionary.
:param subsystem: Name of the subsystem
:return: Extracted reactions
"""
rxns = set()
rxns_id = set()
for rxn in self._redgem.reactions:
if rxn.subsystem == subsystem:
rxns.add(rxn)
rxns_id.add(rxn.id)
self._subsystem_reactions[subsystem] = rxns
self._subsystem_reactions_id[subsystem] = rxns_id
return rxns
def extract_subsystem_metabolites(self, subsystem):
"""
Extracts all metabolites of a subsystem and stores them and their id in the corresponding
dictionary.
:param subsystem: Name of the subsystem
:return: Extracted metabolites
"""
subsystem_rxns = self._subsystem_reactions[subsystem]
metabolites = set()
metabolites_id = set()
for rxn in subsystem_rxns:
for metabolite in rxn.metabolites:
metabolite_id = metabolite.id
if metabolite_id in self._cofactor_pairs \
or metabolite_id in self._small_metabolites \
or metabolite_id in self._inorganics:
continue
metabolites.add(metabolite)
metabolites_id.add(metabolite.id)
self._subsystem_metabolites[subsystem] = metabolites
self._subsystem_metabolites_id[subsystem] = metabolites_id
return metabolites
def create_new_stoichiometric_matrix(self):
"""
Extracts the new graph without the small metabolites, inorganics and cofactor pairs.
:return: Networkx graph of the new network
"""
kept_rxns = []
kept_metabolites = set()
for rxn in self._redgem.reactions:
metabolites = {}
for metabolite, coefficient in rxn.metabolites.items():
metabolite_id = metabolite.id
if metabolite_id in self._cofactor_pairs \
or metabolite_id in self._small_metabolites \
or metabolite_id in self._inorganics:
continue
new_metabolite = Metabolite(metabolite_id,
formula=metabolite.formula,
name=metabolite.name,
compartment=metabolite.compartment)
metabolites[new_metabolite] = coefficient
kept_metabolites.add(metabolite)
new_rxn = Reaction(rxn.id,
name=rxn.name,
subsystem=rxn.subsystem,
lower_bound=rxn.lower_bound,
upper_bound=rxn.upper_bound)
new_rxn.add_metabolites(metabolites)
kept_rxns.append(new_rxn)
        paths_struct = [{} for _ in range(self._d+1)]  # list comprehension to create independent dicts
to_struct = [""] * (self._d+1)
for metabolite in kept_metabolites:
self._graph.add_node(metabolite.id, paths=paths_struct, to=to_struct)
for rxn in kept_rxns:
for reactant in rxn.reactants:
for product in rxn.products:
self._graph.add_edge(reactant.id, product.id, rxn_id=rxn.id, weight=1)
return self._graph
def breadth_search_subsystems_paths_length_d(self, subsystem_i, subsystem_j, d):
"""
Breadth first search from each metabolite in subsystem i with special stop conditions
during exploration for paths of length d.
        This function explores the graph through allowed paths only: intermediate nodes can't
        belong to subsystem i or j, but the path must start in i and end in j. The length of each path found is d.
:param subsystem_i: Source subsystem
:param subsystem_j: Destination subsystem
:param d: Path length desired
:return: None
"""
for metabolite_id in self._subsystem_metabolites_id[subsystem_i]:
# Find metabolites at a distance d from metabolite_id
ancestors = {}
frontier = {metabolite_id}
explored = {metabolite_id}
for i in range(d):
new_nodes = set()
for current_node in frontier:
for new_node in set(self._graph.adj[current_node]):
if self.is_node_allowed(new_node, i, explored, subsystem_i, subsystem_j, d):
new_nodes.add(new_node)
# new_node can already be in ancestors if there are 2 paths of same
# length to it
if new_node in ancestors:
ancestors[new_node].append(current_node)
else:
ancestors[new_node] = [current_node]
explored = explored.union(new_nodes)
frontier = new_nodes
# Handle d = 0 case, since it didn't go through the loop
if d == 0 and metabolite_id not in self._subsystem_metabolites_id[subsystem_j]:
                frontier = set()
# Retrieve and save metabolites, reactions and paths
for node in frontier:
paths = self.retrieve_all_paths(node, metabolite_id, ancestors)
self._intermediate_paths[subsystem_i][subsystem_j][d] = \
self._intermediate_paths[subsystem_i][subsystem_j][d].union(set(paths))
self.retrieve_intermediate_metabolites_and_reactions(paths, subsystem_i,
subsystem_j, d)
def is_node_allowed(self, node, i, explored, subsystem_i, subsystem_j, d):
"""
Checks whether or not a metabolite is allowed for the current path.
The new node is added if it is not already explored, if it is not in the source subsystem,
and if it is not in the destination subsystem, except if it is the last round
of exploration
:param node: Metabolite id
:param i: Current step
:param explored: Explored node for this path
:param subsystem_i: Source subsystem
:param subsystem_j: Destination subsystem
:param d: Path length desired
:return: Boolean answering the question
"""
if node in explored:
return False
if subsystem_i != subsystem_j and node in self._subsystem_metabolites_id[subsystem_i]:
return False
if i < d-1 and node in self._subsystem_metabolites_id[subsystem_j]:
return False
if i == d-1 and node not in self._subsystem_metabolites_id[subsystem_j]:
return False
return True
def retrieve_all_paths(self, dest_node, src_node, ancestors, init_dict=True):
"""
Retrieves all paths between a source metabolite and a destination metabolite after a
breadth first search.
This function is a recursive function, which makes use of dynamic programming to reduce
its complexity. It uses self._path_dict to store already computed data.
:param dest_node: Destination metabolite
:param src_node: Source metabolite
:param ancestors: Dictionary with ancestors found during the search
:param init_dict: Boolean, for function initialisation
:return: A list of all paths as tuples
"""
if init_dict:
self._path_dict = {}
if dest_node == src_node:
self._path_dict[dest_node] = [(src_node,)]
if dest_node not in self._path_dict:
new_paths = []
for previous_node in ancestors[dest_node]:
for path in self.retrieve_all_paths(previous_node, src_node, ancestors, False):
new_paths.append(path + (dest_node,))
self._path_dict[dest_node] = new_paths
return self._path_dict[dest_node]
def retrieve_intermediate_metabolites_and_reactions(self, paths, subsystem_i, subsystem_j, d):
"""
Retrieves and stores intermediate metabolites and reactions (i.e. M_{i,j}, R_{i,j},
M_{i,i} and R_{i,i}).
        This function adds all reactions contained in these paths, and all intermediate metabolites between the path endpoints.
:param paths: List of paths between subsystems
:param subsystem_i: Source subsystem
:param subsystem_j: Destination subsystem
:param d: Path length
:return: None
"""
for path in paths:
for i in range(len(path)-1):
reaction = self._graph[path[i]][path[i+1]]['rxn_id']
self._intermediate_reactions_id[subsystem_i][subsystem_j][d].add(reaction)
if i > 0:
self._intermediate_metabolites_id[subsystem_i][subsystem_j][d].add(path[i])
def find_min_distance_between_subsystems(self):
"""
Find minimal distance between each subsystems in both directions
:return: Dict with distances
"""
for i in self._core_subsystems:
for j in self._core_subsystems:
for k in range(self._d+1):
                    # if there is a path of length k
if self._intermediate_paths[i][j][k]:
self._min_distance_sub_to_sub[i][j] = k
break
                # if no path was found, the distance keeps its default value (-1)
return self._min_distance_sub_to_sub
def breadth_search_extracellular_system_paths(self, subsystem, n):
"""
Breadth first search from each metabolite in the extracellular system with special stop
conditions during exploration for paths of length n.
        This function explores the graph through allowed paths only: intermediate nodes can't
        belong to the extracellular system or the subsystem, but the path must start in the
        extracellular system and end in the subsystem. The length of each path found is n.
:param subsystem: Destination subsystem
:param n: Path length desired
:return: None
"""
for metabolite_id in self._extracellular_system:
# Find metabolites at a distance n from metabolite_id
if metabolite_id not in self._graph:
continue
ancestors = {}
frontier = {metabolite_id}
explored = {metabolite_id}
for i in range(n):
new_nodes = set()
for current_node in frontier:
for new_node in set(self._graph.adj[current_node]):
if self.is_node_allowed_extracellular(new_node, i, explored, subsystem, n):
new_nodes.add(new_node)
# new_node can already be in ancestors if there are 2 paths of same
# length to it
if new_node in ancestors:
ancestors[new_node].append(current_node)
else:
ancestors[new_node] = [current_node]
explored = explored.union(new_nodes)
frontier = new_nodes
# Handle n = 0 case, since it didn't go through the loop
if n == 0 and metabolite_id not in self._subsystem_metabolites_id[subsystem]:
                frontier = set()
# Retrieve and save metabolites, reactions and paths
for node in frontier:
paths = self.retrieve_all_paths(node, metabolite_id, ancestors)
self._intermediate_extracellular_paths[subsystem][n] = \
self._intermediate_extracellular_paths[subsystem][n].union(set(paths))
self.retrieve_intermediate_extracellular_metabolites_and_reactions(paths, subsystem,
n)
def is_node_allowed_extracellular(self, node, i, explored, subsystem, n):
"""
Checks whether or not a metabolite is allowed for the current path.
The new node is added if it is not already explored, if it is not in the extracellular
system, and if it is not in the destination subsystem except if it is the last round
of exploration
:param node: Metabolite id
:param i: Current step
:param explored: Explored node for this path
:param subsystem: Destination subsystem
:param n: Path length desired
:return: Boolean answering the question
"""
if node in explored:
return False
if node in self._extracellular_system:
return False
if i < n-1 and node in self._subsystem_metabolites_id[subsystem]:
return False
if i == n-1 and node not in self._subsystem_metabolites_id[subsystem]:
return False
return True
def retrieve_intermediate_extracellular_metabolites_and_reactions(self, paths, subsystem, n):
"""
Retrieves and stores intermediate metabolites and reactions for the extracellular system
        This function adds all reactions contained in these paths, and all intermediate metabolites between the path endpoints.
:param paths: List of paths
:param subsystem: Destination subsystem
:param n: Path length
:return: None
"""
for path in paths:
for i in range(len(path) - 1):
reaction = self._graph[path[i]][path[i + 1]]['rxn_id']
self._intermediate_extracellular_reactions_id[subsystem][n].add(reaction)
if i > 0:
self._intermediate_extracellular_metabolites_id[subsystem][n].add(path[i])
def run_between_all_subsystems(self):
"""
Retrieve subsystem and intermediate reactions and metabolites.
:return: None
"""
for subsystem in self._core_subsystems:
self.extract_subsystem_reactions(subsystem)
self.extract_subsystem_metabolites(subsystem)
for subsystem_i in self._core_subsystems:
for subsystem_j in self._core_subsystems:
for k in range(self._d+1):
self.breadth_search_subsystems_paths_length_d(subsystem_i, subsystem_j, k)
def run_extracellular_system(self):
"""
Retrieve intermediate reactions and metabolites for the extracellular system
:return: None
"""
for subsystem in self._core_subsystems:
for k in range(self._n + 1):
self.breadth_search_extracellular_system_paths(subsystem, k)
def extract_sub_network(self):
"""
Extracts the reduced gem.
:return: None
"""
def extract_id(x):
return x.id
to_remove_metabolites = set(map(extract_id, self._redgem.metabolites))
to_remove_reactions = set(map(extract_id, self._redgem.reactions))
# Keep subsystems reactions and metabolites
for name in self._core_subsystems:
to_remove_reactions = to_remove_reactions - self._subsystem_reactions_id[name]
to_remove_metabolites = to_remove_metabolites - self._subsystem_metabolites_id[name]
# Keep intermediate reactions and metabolites
for i in self._core_subsystems:
for j in self._core_subsystems:
for k in range(self._d+1):
to_remove_reactions = to_remove_reactions \
- self._intermediate_reactions_id[i][j][k]
to_remove_metabolites = to_remove_metabolites \
- self._intermediate_metabolites_id[i][j][k]
# Keep extracellular metabolites
to_remove_metabolites = to_remove_metabolites - set(self._extracellular_system)
# Keep intermediate extracellular reactions and metabolites
for i in self._core_subsystems:
for k in range(self._d+1):
to_remove_reactions = to_remove_reactions \
- self._intermediate_extracellular_reactions_id[i][k]
to_remove_metabolites = to_remove_metabolites \
- self._intermediate_extracellular_metabolites_id[i][k]
self._redgem.remove_reactions(to_remove_reactions, True)
def run(self):
"""
Runs RedGEM.
:return: None
"""
self.create_new_stoichiometric_matrix()
self.run_between_all_subsystems()
self.run_extracellular_system()
self.extract_sub_network()
return self._redgem
```
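A hedged usage sketch of the class above: `gem` is assumed to be a cobra `Model` whose reactions carry `subsystem` annotations, and every id list is an illustrative placeholder rather than a value from the source:
```python
from pytfa.redgem.network_expansion import NetworkExpansion

expander = NetworkExpansion(
    gem=gem,  # a cobra.Model, assumed to be loaded beforehand
    core_subsystems=["Glycolysis", "TCA cycle"],
    extracellular_system=["glc__D_e", "o2_e"],
    cofactors=["atp_c", "adp_c"],
    small_metabolites=["h_c", "h2o_c"],
    inorganics=["pi_c"],
    d=2,  # maximum path length between core subsystems
    n=1,  # maximum path length from the extracellular system
)
reduced_gem = expander.run()
```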
#### File: 12rambau/pytfa/setup.py
```python
from setuptools import setup, find_packages
# import os
# from pip.req import parse_requirements
# from pip.download import PipSession
# __location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
#
# def read_requirements():
# '''parses requirements from requirements.txt'''
# reqs_path = os.path.join(__location__, 'requirements.txt')
# install_reqs = parse_requirements(reqs_path, session=PipSession())
# reqs = [str(ir.req) for ir in install_reqs]
# return reqs
version_tag = '0.9.3'
setup(name='pytfa',
version=version_tag,
author='pyTFA team',
author_email='<EMAIL>',
url='https://github.com/EPFL-LCSB/pytfa/',
download_url='https://github.com/EPFL-LCSB/pytfa/archive/'+version_tag+'.tar.gz',
install_requires=['cobra>0.13',
'bokeh>=0.12.1',
'networkx',
'optlang',
'pytest',
'scipy',
'tqdm'],
packages = find_packages(),
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4',
description='pyTFA, Thermodynamics-based Flux Analysis in Python',
keywords=['pytfa','tfa','thermodynamics','flux analysis'],
license='Apache 2.0',
# See https://PyPI.python.org/PyPI?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Environment :: Console',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
``` |
{
"source": "12rambau/sdg_indicators_module",
"score": 3
} |
#### File: component/model/indicator_model.py
```python
from sepal_ui import model
from traitlets import Any
from component import parameter as pm
class IndicatorModel(model.Model):
#####################
## input ##
#####################
# times
start = Any(None).tag(sync=True)
end = Any(None).tag(sync=True)
# sensors
sensors = Any(None).tag(sync=True)
# Vegetation indices
vegetation_index = Any(None).tag(sync=True)
# trajectory
trajectory = Any(None).tag(sync=True)
lceu = Any(None).tag(sync=True)
    # transition matrix; the output format is a plain list, as we need it to remap the land cover rather than a matrix
transition_matrix = Any(pm.default_trans_matrix).tag(sync=True)
# Climate regime
conversion_coef = Any(None).tag(sync=True)
######################
## output ##
######################
land_cover = Any(None).tag(sync=True)
soc = Any(None).tag(sync=True)
productivity = Any(None).tag(sync=True)
productivity_trend = Any(None).tag(sync=True)
productivity_state = Any(None).tag(sync=True)
productivity_performance = Any(None).tag(sync=True)
indicator_15_3_1 = Any(None).tag(sync=True)
def folder_name(self):
"""Return all the parameter formated as a string"""
# get the dates
start = self.start
end = self.end
# create the sensor list
if "l" in self.sensors[0]:
            # get only the number of the landsat satellites
names = [pm.sensors[s][2][1] for s in self.sensors]
sensor = f"l{''.join(names)}"
else:
sensor = pm.sensors[self.sensors[0]][2]
# get the vegetation index
vegetation_index = self.vegetation_index
# get the trajectory
trajectory = self.trajectory.replace("_trend", "")
# get land cover ecosystem unit
lceu = self.lceu
# get info on the transition matrix
matrix = (
"default" if self.transition_matrix == pm.default_trans_matrix else "custom"
)
# get the climate regime
climate = f"cr{int(self.conversion_coef*100)}"
return f"{start}_{end}_{sensor}_{vegetation_index}_{lceu}_{matrix}_{climate}"
```
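A hedged sketch of the naming convention produced by `folder_name` (the import path, the attribute values, and the exact sensor token, which depends on `pm.sensors`, are all illustrative):
```python
from component.model.indicator_model import IndicatorModel  # assumed import path

model = IndicatorModel()
model.start, model.end = 2001, 2015
model.sensors = ["Landsat 7", "Landsat 8"]
model.vegetation_index = "ndvi"
model.trajectory = "ndvi_trend"
model.lceu = "gaes"  # hypothetical land cover ecosystem unit
model.conversion_coef = 0.8

# shaped like '<start>_<end>_<sensor>_<index>_<lceu>_<matrix>_cr<coef*100>'
print(model.folder_name())  # e.g. '2001_2015_l78_ndvi_gaes_default_cr80'
```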
#### File: component/scripts/bar_plot.py
```python
from matplotlib import pyplot as plt
from component import parameter as cp
def bar_plot(df):
# create the figure
fig, ax = plt.subplots(figsize=(10, 9))
# plot the dataframe
df.plot.bar(
rot=0,
color=cp.legend,
ax=ax,
edgecolor="black",
fontsize=12,
)
# fix the design of the plot
ax.set_xlabel("Land cover")
ax.set_yscale("log")
ax.set_ylabel("Area in ha")
ax.set_title("Distribution of area by land cover type", fontweight="bold")
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
ax.spines["left"].set_visible(False)
return fig, ax
``` |
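A minimal usage sketch, assuming the DataFrame columns match the keys of `cp.legend` (the sample figures are invented):
```python
import pandas as pd

from component.scripts.bar_plot import bar_plot  # assumed import path

df = pd.DataFrame(
    {"degraded": [120, 40], "stable": [800, 300], "improved": [60, 10]},
    index=["forest", "cropland"],
)
fig, ax = bar_plot(df)
fig.savefig("area_by_land_cover.png")
```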
{
"source": "12rambau/SDG_indicators_module",
"score": 3
} |
#### File: component/scripts/download.py
```python
import time
import rasterio as rio
from rasterio.merge import merge
from matplotlib.colors import to_rgba
from matplotlib import pyplot as plt
from component.message import ms
from component import parameter as pm
from .gdrive import gdrive
def digest_tiles(filename, result_dir, output, tmp_file):
if tmp_file.is_file():
output.add_live_msg(ms.download.file_exist.format(tmp_file), "warning")
time.sleep(2)
return
drive_handler = gdrive()
files = drive_handler.get_files(filename)
# if no file, it means that the download had failed
if not len(files):
raise Exception(ms.gdrive.error.no_file)
drive_handler.download_files(files, result_dir)
pathname = f"{filename}*.tif"
files = [file for file in result_dir.glob(pathname)]
# run the merge process
output.add_live_msg(ms.download.merge_tile)
    # manual open and close because I don't know how many files there are
sources = [rio.open(file) for file in files]
data, output_transform = merge(sources)
out_meta = sources[0].meta.copy()
out_meta.update(nodata=0)
out_meta.update(
driver="GTiff",
height=data.shape[1],
width=data.shape[2],
transform=output_transform,
compress="lzw",
)
# create a colormap
colormap = {}
for i, color in enumerate(pm.legend.values()):
color = tuple(int(c * 255) for c in to_rgba(color))
colormap[i + 1] = color
with rio.open(tmp_file, "w", **out_meta) as dest:
dest.write(data)
dest.write_colormap(1, colormap)
# manually close the files
[src.close() for src in sources]
# delete local files
[file.unlink() for file in files]
return
def export_legend(filename, colors, title):
"""
Create a color list and display it in a png image.
Args:
filename (pathlib.Path); the path where to save the file
colors (dict): the color palette to create. It should use the following format: {classname: hex_color, ...}
Return:
(pathlib.Path) the filename
"""
# create a color list
color_map = [*colors.values()]
columns = ["entry"]
rows = [" " * 10 for i in range(len(colors))] # trick to see the first column
cell_text = [[name] for name in colors]
fig, ax = plt.subplots(1, 1, figsize=[6.4, 8.6])
# remove the graph box
ax.axis("tight")
ax.axis("off")
# set the tab title
ax.set_title(title)
# create the table
the_table = ax.table(
colColours=[to_rgba("lightgrey")],
cellText=cell_text,
rowLabels=rows,
colWidths=[0.4],
rowColours=color_map,
colLabels=columns,
loc="center",
)
the_table.scale(1, 1.5)
# save & close
plt.savefig(filename)
plt.close()
return
``` |
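A minimal sketch of `export_legend`; the colors and title are illustrative:
```python
from pathlib import Path

from component.scripts.download import export_legend  # assumed import path

colors = {"degraded": "#d7191c", "stable": "#ffffbf", "improved": "#1a9641"}
export_legend(Path("legend.png"), colors, "SDG 15.3.1 legend")
```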
{
"source": "12rambau/sdg_indicators_module",
"score": 3
} |
#### File: component/scripts/land_cover.py
```python
import ee
from component import parameter as pm
ee.Initialize()
def land_cover(model, aoi_model, output):
"""Calculate land cover indicator"""
# load the land cover map
landcover = ee.Image(pm.land_cover).clip(
aoi_model.feature_collection.geometry().bounds()
)
landcover = landcover.where(landcover.eq(9999), pm.int_16_min).updateMask(
landcover.neq(pm.int_16_min)
)
# Remap LC according to input matrix, aggregation of land cover classes to IPCC classes.
lc_year_start = min(
max(model.start, pm.land_cover_first_year), pm.land_cover_max_year
)
lc_year_end = min(max(model.end, pm.land_cover_first_year), pm.land_cover_max_year)
landcover_start = landcover.select(f"year_{lc_year_start}").rename(
"landcover_start"
)
landcover_end = landcover.select(f"year_{lc_year_end}").rename("landcover_end")
# baseline land cover map reclassified into IPCC classes
landcover_start_remapped = landcover_start.remap(
pm.translation_matrix[0], pm.translation_matrix[1]
).rename("start")
# target land cover map reclassified into IPCC classes
landcover_end_remapped = landcover_end.remap(
pm.translation_matrix[0], pm.translation_matrix[1]
).rename("end")
water_mask = landcover_end.where(landcover_end.eq(210), 0).rename("water")
# compute transition map (first digit for historical land cover, and second digit for monitoring year land cover)
landcover_transition = (
landcover_start_remapped.multiply(10)
.add(landcover_end_remapped)
.rename("transition")
)
# definition of land cover transitions as degradation (-1), improvement (1), or no relevant change (0)
trans_matrix_flatten = [
item for sublist in model.transition_matrix for item in sublist
]
landcover_degredation = landcover_transition.remap(
pm.IPCC_lc_change_matrix, trans_matrix_flatten
)
# use the byte convention
# 1 degraded - 2 stable - 3 improved
landcover_degredation = (
landcover_degredation.remap([1, 0, -1, pm.int_16_min], [3, 2, 1, 0])
.uint8()
.rename("degradation")
)
    land_cover_out = (
        landcover_degredation.addBands(landcover_transition.uint8())
        .addBands(landcover_start_remapped.uint8())
        .addBands(landcover_end.uint8())
        .addBands(landcover_end_remapped.uint8())
        .addBands(water_mask.uint8())
    )
return land_cover_out
```
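The transition band above packs two class maps into a single image: the start IPCC class becomes the tens digit and the end class the units digit, so every (start, end) pair maps to a unique two-digit code. A plain-Python illustration of the encoding (class codes are illustrative):
```python
start_class, end_class = 3, 1  # e.g. grassland -> forest
transition = start_class * 10 + end_class
assert transition == 31

# both classes can be recovered from the code
assert (transition // 10, transition % 10) == (3, 1)
```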
#### File: component/scripts/run_15_3_1.py
```python
from zipfile import ZipFile
import time
from itertools import product
import ee
import geemap
from ipywidgets import Output
import ipyvuetify as v
import geopandas as gpd
import pandas as pd
from sepal_ui.scripts import utils as su
from component import parameter as pm
from component.message import ms
from .gdrive import gdrive
from .gee import wait_for_completion
from .download import digest_tiles
from .integration import *
from .productivity import *
from .soil_organic_carbon import *
from .land_cover import *
ee.Initialize()
def download_maps(aoi_model, model, output):
# create a result folder including the data parameters
# create the aoi and parameter folder if not existing
aoi_dir = pm.result_dir / su.normalize_str(aoi_model.name)
result_dir = aoi_dir / model.folder_name()
result_dir.mkdir(parents=True, exist_ok=True)
# get the export scale
# from the first sensor (we only combine compatible one)
scale = pm.sensors[model.sensors[0]][1]
output.add_live_msg(ms.download.start_download)
# create the export path
# they are in correct order don't change it
pattern = f"{aoi_model.name}_{model.folder_name()}"
layers = {
f"land_cover": model.land_cover,
f"soc": model.soc,
f"productivity_trend": model.productivity_trend,
f"productivity_performance": model.productivity_state,
f"productivity_state": model.productivity_state,
f"productivity_indicator": model.productivity,
f"indicator_15_3_1": model.indicator_15_3_1,
}
# load the drive_handler
drive_handler = gdrive()
# clip the images if it's an administrative layer and keep the bounding box if not
if aoi_model.feature_collection:
geom = aoi_model.feature_collection.geometry()
layers = {name: layer.clip(geom) for name, layer in layers.items()}
# download all files
downloads = any(
[
drive_handler.download_to_disk(name, layer, aoi_model, output, scale)
for name, layer in layers.items()
]
)
    # I assume that they are always launched at the same time
    # if not, it's going to crash
if downloads:
wait_for_completion([name for name in layers], output)
output.add_live_msg(ms.gee.tasks_completed, "success")
# digest the tiles
for name in layers:
digest_tiles(
name, result_dir, output, result_dir / f"{pattern}_{name}_merge.tif"
)
output.add_live_msg(ms.download.remove_gdrive)
# remove the files from drive
for name in layers:
drive_handler.delete_files(drive_handler.get_files(name))
# display msg
output.add_live_msg(ms.download.completed, "success")
return tuple([result_dir / f"{pattern}_{name}_merge.tif" for name in layers])
def display_maps(aoi_model, model, m, output):
m.zoom_ee_object(aoi_model.feature_collection.geometry())
# get the geometry to clip on
geom = aoi_model.feature_collection.geometry()
# clip on the bounding box when we use a custom aoi
if not ("ADMIN" in aoi_model.method):
geom = geom.bounds()
lc_year_start = min(
max(model.start, pm.land_cover_first_year), pm.land_cover_max_year
)
lc_year_end = min(max(model.end, pm.land_cover_first_year), pm.land_cover_max_year)
# add the layers
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.lc_layer))
m.addLayer(
model.land_cover.select("start").clip(geom),
pm.viz_lc,
ms._15_3_1.lc_start.format(lc_year_start),
)
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.lc_layer))
m.addLayer(
model.land_cover.select("end").clip(geom),
pm.viz_lc,
ms._15_3_1.lc_end.format(lc_year_end),
)
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.prod_layer))
m.addLayer(
model.productivity.clip(geom).selfMask(), pm.viz_prod, ms._15_3_1.prod_layer
)
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.lc_layer))
m.addLayer(
model.land_cover.select("degradation").clip(geom).selfMask(),
pm.viz_lc_sub,
ms._15_3_1.lc_layer,
)
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.soc_layer))
m.addLayer(model.soc.clip(geom).selfMask(), pm.viz_soc, ms._15_3_1.soc_layer)
output.add_live_msg(ms.gee.add_layer.format(ms._15_3_1.ind_layer))
m.addLayer(
model.indicator_15_3_1.clip(geom).selfMask(),
pm.viz_indicator,
ms._15_3_1.ind_layer,
)
# add the aoi on the map
empty = ee.Image().byte()
aoi_line = empty.paint(
**{"featureCollection": aoi_model.feature_collection, "color": 1, "width": 2}
)
m.addLayer(aoi_line, {"palette": v.theme.themes.dark.accent}, "aoi")
output.add_live_msg(ms._15_3_1.map_loading_complete, "success")
return
def compute_indicator_maps(aoi_model, model, output):
# raise an error if the years are not in the right order
if not (model.start < model.end):
raise Exception(ms._15_3_1.error.wrong_year)
# compute intermediary maps
vi_int, climate_int = integrate_ndvi_climate(aoi_model, model, output)
model.productivity_trend = productivity_trajectory(
model, vi_int, climate_int, output
)
model.productivity_performance = productivity_performance(
aoi_model, model, vi_int, climate_int, output
)
model.productivity_state = productivity_state(aoi_model, model, vi_int, output)
# compute result maps
model.land_cover = land_cover(model, aoi_model, output)
model.soc = soil_organic_carbon(model, aoi_model, output)
model.productivity = productivity_final(
model.productivity_trend,
model.productivity_performance,
model.productivity_state,
output,
)
# sum up in a map
model.indicator_15_3_1 = indicator_15_3_1(
model.productivity, model.land_cover, model.soc, output
)
return
def compute_lc_transition_stats(aoi_model, model):
"""function to calculate the statistics of land cover transitions between two years to be used as input for the sankey diagram.
input: ee.Image(land cover transition)
retun: DataFrame.
"""
landcover = model.land_cover.select("transition")
scale = pm.sensors[model.sensors[0]][1]
aoi = aoi_model.feature_collection.geometry().bounds()
lc_year_start = min(
max(model.start, pm.land_cover_first_year), pm.land_cover_max_year
)
lc_year_end = min(max(model.end, pm.land_cover_first_year), pm.land_cover_max_year)
lc_name = [*pm.lc_color]
# make a list of all the possible transitions values
class_value = [x * 10 + y for x, y in product(range(1, 8), repeat=2)]
# make a list of all the possible transitions classes
class_name = [x + "_" + y for x, y in product(lc_name, repeat=2)]
# creat a multi band image with all the classes as bands
multiband_class = landcover.eq(class_value).rename(class_name)
# calculate the area
pixel_area = multiband_class.multiply(ee.Image.pixelArea().divide(10000))
area_per_class = pixel_area.reduceRegion(
**{
"reducer": ee.Reducer.sum(),
"geometry": aoi,
"scale": scale,
"maxPixels": 1e13,
"bestEffort": True,
"tileScale": 2,
}
)
data = area_per_class.getInfo()
    # split the transition names and organise the data
    df = [[*x.split("_"), y] for x, y in data.items()]
# convert to a DataFrame
df = pd.DataFrame(data=df, columns=[lc_year_start, lc_year_end, "Area"])
return df
def compute_stats_by_lc(aoi_model, model):
"""
tabulate the area by land cover categories.
input: ee.Image(ending land cover, final indicator)
return: DataFrame
"""
# land cover
landcover = model.land_cover.select("end")
# final indicator
indicator = model.indicator_15_3_1
aoi = aoi_model.feature_collection.geometry().bounds()
lc_name = [*pm.lc_color]
deg_name = [*pm.legend]
# combine indicator and land cover together.
# first digit represents the indicator, second digit represents land cover categories
lc_deg_combine = indicator.multiply(10).add(landcover)
# all possible combined values
class_value = [x * 10 + y for x, y in product(range(1, 4), range(1, 8))]
# all possible combined categories
class_name = [x + "_" + y for x, y in product(deg_name, lc_name)]
    # create a multi-band image with all the categories as bands
multiband_class = lc_deg_combine.eq(class_value).rename(class_name)
# calculate the area
pixel_area = multiband_class.multiply(ee.Image.pixelArea().divide(10000))
area_per_class = pixel_area.reduceRegion(
**{
"reducer": ee.Reducer.sum(),
"geometry": aoi,
"scale": 300,
"maxPixels": 1e13,
"bestEffort": True,
"tileScale": 8,
}
)
data = area_per_class.getInfo()
    # split and organise the data
    df = [[*x.split("_"), y] for x, y in data.items()]
# convert to a DataFrame
df = pd.DataFrame(data=df, columns=["Indicator", "Landcover", "Area"])
return df
def compute_zonal_analysis(aoi_model, model, output):
# create a result folder including the data parameters
# create the aoi and parameter folder if not existing
aoi_dir = pm.result_dir / su.normalize_str(aoi_model.name)
result_dir = aoi_dir / model.folder_name()
result_dir.mkdir(parents=True, exist_ok=True)
indicator_stats = (
result_dir / f"{aoi_model.name}_{model.folder_name()}_indicator_15_3_1"
)
# check if the file already exist
indicator_zip = indicator_stats.with_suffix(".zip")
if indicator_zip.is_file():
output.add_live_msg(ms.download.already_exist.format(indicator_zip), "warning")
return indicator_zip
output_widget = Output()
output.add_msg(output_widget)
# to be removed when moving to shp
indicator_csv = indicator_stats.with_suffix(".csv")
scale = 100 if "Sentinel 2" in model.sensors else 300
with output_widget:
geemap.zonal_statistics_by_group(
in_value_raster=model.indicator_15_3_1,
in_zone_vector=aoi_model.feature_collection,
out_file_path=indicator_csv,
statistics_type="SUM",
denominator=1000000,
decimal_places=2,
scale=scale,
tile_scale=1.0,
)
# this should be removed once geemap is repaired
#########################################################################
aoi_json = geemap.ee_to_geojson(aoi_model.feature_collection)
aoi_gdf = gpd.GeoDataFrame.from_features(aoi_json).set_crs("EPSG:4326")
indicator_df = pd.read_csv(indicator_csv)
if "Class_0" in indicator_df.columns:
aoi_gdf["NoData"] = indicator_df["Class_0"]
if "Class_3" in indicator_df.columns:
aoi_gdf["Improve"] = indicator_df["Class_3"]
if "Class_2" in indicator_df.columns:
aoi_gdf["Stable"] = indicator_df["Class_2"]
if "Class_1" in indicator_df.columns:
aoi_gdf["Degrade"] = indicator_df["Class_1"]
aoi_gdf = aoi_gdf[aoi_gdf.geom_type != "LineString"]
aoi_gdf.to_file(indicator_stats.with_suffix(".shp"))
#########################################################################
# get all the shp extentions
suffixes = [".dbf", ".prj", ".shp", ".cpg", ".shx"] # , '.fix']
# write the zip file
with ZipFile(indicator_zip, "w") as myzip:
for suffix in suffixes:
file = indicator_stats.with_suffix(suffix)
myzip.write(file, file.name)
output.add_live_msg(ms._15_3_1.stats_complete.format(indicator_zip), "success")
return indicator_zip
def indicator_15_3_1(productivity, landcover, soc, output):
water = landcover.select("water")
landcover = landcover.select("degradation")
indicator = (
ee.Image(0)
.where(productivity.eq(3).And(landcover.eq(3)).And(soc.eq(3)), 3)
.where(productivity.eq(3).And(landcover.eq(3)).And(soc.eq(2)), 3)
.where(productivity.eq(3).And(landcover.eq(3)).And(soc.eq(1)), 1)
.where(productivity.eq(3).And(landcover.eq(2)).And(soc.eq(3)), 3)
.where(productivity.eq(3).And(landcover.eq(2)).And(soc.eq(2)), 3)
.where(productivity.eq(3).And(landcover.eq(2)).And(soc.eq(1)), 1)
.where(productivity.eq(3).And(landcover.eq(1)).And(soc.eq(3)), 1)
.where(productivity.eq(3).And(landcover.eq(1)).And(soc.eq(2)), 1)
.where(productivity.eq(3).And(landcover.eq(1)).And(soc.eq(1)), 1)
.where(productivity.eq(2).And(landcover.eq(3)).And(soc.eq(3)), 3)
.where(productivity.eq(2).And(landcover.eq(3)).And(soc.eq(2)), 3)
.where(productivity.eq(2).And(landcover.eq(3)).And(soc.eq(1)), 1)
.where(productivity.eq(2).And(landcover.eq(2)).And(soc.eq(3)), 3)
.where(productivity.eq(2).And(landcover.eq(2)).And(soc.eq(2)), 2)
.where(productivity.eq(2).And(landcover.eq(2)).And(soc.eq(1)), 1)
.where(productivity.eq(2).And(landcover.eq(1)).And(soc.eq(3)), 1)
.where(productivity.eq(2).And(landcover.eq(1)).And(soc.eq(2)), 1)
.where(productivity.eq(2).And(landcover.eq(1)).And(soc.eq(1)), 1)
.where(productivity.eq(1).And(landcover.eq(3)).And(soc.eq(3)), 1)
.where(productivity.eq(1).And(landcover.eq(3)).And(soc.eq(2)), 1)
.where(productivity.eq(1).And(landcover.eq(3)).And(soc.eq(1)), 1)
.where(productivity.eq(1).And(landcover.eq(2)).And(soc.eq(3)), 1)
.where(productivity.eq(1).And(landcover.eq(2)).And(soc.eq(2)), 1)
.where(productivity.eq(1).And(landcover.eq(2)).And(soc.eq(1)), 1)
.where(productivity.eq(1).And(landcover.eq(1)).And(soc.eq(3)), 1)
.where(productivity.eq(1).And(landcover.eq(1)).And(soc.eq(2)), 1)
.where(productivity.eq(1).And(landcover.eq(1)).And(soc.eq(1)), 1)
.where(productivity.eq(1).And(landcover.lt(1)).And(soc.lt(1)), 1)
.where(productivity.lt(1).And(landcover.eq(1)).And(soc.lt(1)), 1)
.where(productivity.lt(1).And(landcover.lt(1)).And(soc.eq(1)), 1)
.where(productivity.eq(2).And(landcover.lt(1)).And(soc.lt(1)), 2)
.where(productivity.lt(1).And(landcover.eq(2)).And(soc.lt(1)), 2)
.where(productivity.lt(1).And(landcover.lt(1)).And(soc.eq(2)), 2)
.where(productivity.eq(3).And(landcover.lt(1)).And(soc.lt(1)), 3)
.where(productivity.lt(1).And(landcover.eq(3)).And(soc.lt(1)), 3)
.where(productivity.lt(1).And(landcover.lt(1)).And(soc.eq(3)), 3)
.updateMask(water)
)
return indicator.uint8()
``` |
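The long `where` chain in `indicator_15_3_1` boils down to a one-out-all-out rule: a pixel is degraded as soon as any sub-indicator is degraded, improved when any is improved and none is degraded, and stable otherwise. A hedged NumPy restatement of that decision table (not the module's code, and it omits the water mask):
```python
import numpy as np

def combine(productivity, landcover, soc):
    # 0 = no data, 1 = degraded, 2 = stable, 3 = improved
    stack = np.stack([productivity, landcover, soc])
    out = np.zeros(productivity.shape, dtype=np.uint8)
    out[(stack == 2).any(axis=0)] = 2  # stable if any sub-indicator is stable
    out[(stack == 3).any(axis=0)] = 3  # ...overridden by any improvement
    out[(stack == 1).any(axis=0)] = 1  # ...and degradation always wins
    return out

p = np.array([3, 2, 1, 0])
lc = np.array([3, 2, 2, 0])
s = np.array([2, 3, 3, 2])
print(combine(p, lc, s))  # [3 3 1 2]
```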
{
"source": "12rambau/SDG_indicators_module",
"score": 2
} |
#### File: component/tile/reclassify_tile.py
```python
from sepal_ui import reclassify as rec
from sepal_ui import sepalwidgets as sw
from component import parameter as cp
class ReclassifyTile(rec.ReclassifyView):
def __init__(self):
super().__init__(
gee=True,
default_class={"IPCC CLASSES": str(cp.utils_dir / "UNCCD.csv")},
save=True,
)
# change the title
self.title.children[0].children = ["Adapt Land Cover map"]
# remove the custom option
# tmp_list = self.w_default.children.copy()
# self.w_default.children = tmp_list[1:]
# select IPCC by default
self.w_default.children[1].fire_event("click", None)
# remove optional panel
self.w_optional.class_ = "d-none"
# change the metadata of the tile
self._metadata = {"mount_id": "reclassify_tile"}
# clean w_image image type to have only image
# TODO, uncomment when the sepal_ui lib method will be available
# self.w_image.types = ["IMAGE"]
# self.w_image._get_items()
```
#### File: component/widget/picker_line_productivity.py
```python
import ipyvuetify as v
from component import parameter as pm
from component.message import ms
class PickerLineProductivity(v.Layout):
YEAR_RANGE = [y for y in range(pm.sensor_max_year, pm.L4_start - 1, -1)]
def __init__(self, model):
self.model = model
# create the widgets
self.trend_start_picker = v.Select(
label=ms.trend_start_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
self.trend_end_picker = v.Select(
label=ms.trend_end_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
self.state_start_picker = v.Select(
label=ms.state_start_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
self.state_end_picker = v.Select(
label=ms.state_end_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
self.performance_start_picker = v.Select(
label=ms.performance_start_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
self.performance_end_picker = v.Select(
label=ms.performance_end_lbl,
items=self.YEAR_RANGE,
xs4=True,
v_model=None,
class_="ml-5 mr-5",
)
# bind them to the output
model.bind(self.trend_start_picker, "trend_start").bind(
self.trend_end_picker, "trend_end"
).bind(self.state_start_picker, "state_start").bind(
self.state_end_picker, "state_end"
).bind(
self.performance_start_picker, "performance_start"
).bind(
self.performance_end_picker, "performance_end"
)
super().__init__(
row=True,
children=[
v.Flex(xs12=True, md6=True, children=[self.trend_start_picker]),
                v.Flex(xs12=True, md6=True, children=[self.trend_end_picker]),
v.Flex(xs12=True, md6=True, children=[self.state_start_picker]),
v.Flex(xs12=True, md6=True, children=[self.state_end_picker]),
v.Flex(xs12=True, md6=True, children=[self.performance_start_picker]),
v.Flex(xs12=True, md6=True, children=[self.performance_end_picker])
],
)
```
#### File: component/widget/select_lc.py
```python
from sepal_ui import sepalwidgets as sw
from sepal_ui.scripts import utils as su
import ee
from natsort import natsorted
import ipyvuetify as v
from component.message import ms
from component import parameter as cp
ee.Initialize()
class SelectLC(v.Layout):
def __init__(self, label="select Land Cover"):
# create the layout
super().__init__(row=True, xs12=True)
# set up the content
self.w_image = sw.AssetSelect(types=["IMAGE"], label=label)
self.w_band = v.Select(label="band", v_model=None, items=None, class_="pl-5")
# create the children item
self.children = [
v.Flex(xs8=True, children=[self.w_image]),
v.Flex(xs4=True, children=[self.w_band]),
]
# js behaviour
self.w_image.observe(self._validate, "v_model")
@su.switch("loading", "disabled", on_widgets=["w_image"])
def _validate(self, change):
"""
Validate the selected access. Throw an error message if is not accesible.
If the asset can be accessed check that it only include values within the classification"""
w = self.w_image # it's also change["owner"]
w._validate(change)
# only check the values if I have access to the asset
        if not w.valid:
return
# the asset need to be an image
if not w.asset_info["type"] == "IMAGE":
w.asset_info = None
w.valid = False
w.error = True
w.error_messages = ms.select_lc.not_image
return
# call the band list update
self._update_bands()
return
@su.switch("loading", "disabled", on_widgets=["w_band"])
def _update_bands(self):
"""Update the band possibility to the available bands/properties of the input"""
# update the bands values
self.w_band.v_model = None
self.w_band.items = natsorted(
ee.Image(self.w_image.v_model).bandNames().getInfo()
)
return
``` |
{
"source": "12rambau/sepal_geospatial_toolkit",
"score": 3
} |
#### File: sepal_geospatial_toolkit/sgt/sgt_rasterize.py
```python
import sys
import pathlib
import argparse
import geopandas as gpd
from geocube.api.core import make_geocube
import rasterio as rio
import numpy as np
from sgt.utils import custom_print
def rasterize(src_vector, out_rst, res=30, column=None, verbose=False):
""" Burns vector geometries into a raster.
Args :
src_vector (str) : path to the source vector shapefile
out_rst (str, optional) : path to the output raster
res (int, optional) : the resolution of the output raster. If none, the default landsat 7 30m res will be used
        column (str, optional) : the name of the column to use as value in the output raster. default to the first one
"""
# apply the verbose option
v_print = custom_print(verbose)
# read the vector data
gdf = gpd.read_file(src_vector).to_crs("EPSG:4326")
# identify the column to be burn in the raster
if not column:
column = gdf.columns[0]
# optimize dtype
dtype = rio.dtypes.get_minimum_dtype(gdf[column])
# optimize the nodata value to meet the dtype
fill = np.nan
if np.issubdtype(dtype, np.integer):
fill = 0
# convert the metric resolution into deg (as we work in EPSG 4326)
# consider the equator approximation : 1° = 111 Km
res = (res/111)*(10**(-3))
out_grid = make_geocube(
vector_data = gdf,
measurements = [column],
resolution = (-res, res),
fill = fill
)
# write the column to raster file
out_grid[column].rio.to_raster(out_rst, dtype=dtype)
    v_print(f'The vector geometries have been burned into the raster: {out_rst}')
return
if __name__ == "__main__":
# write the description
descript = "Burns vector geometries into a raster"
# create an arg parser
parser = argparse.ArgumentParser(description=descript)
# read arguments
parser.add_argument(
'-i',
dest = 'src_vector',
metavar = 'source.shp',
help = '(str) : path to the source vector file',
required = True,
type = pathlib.Path
)
parser.add_argument(
'-o',
dest = 'out_rst',
metavar = 'output.tif',
help = '(str) : path to the output raster',
required = True,
type = pathlib.Path
)
    parser.add_argument(
        '-res',
        dest = 'res',
        metavar = 'a_number',
        help = '(int) : the resolution of the output raster. If none, the default landsat 7 30m res will be used',
        default = 30,
        required = False,
        type = int
    )
parser.add_argument(
'-c',
dest = 'column',
metavar = 'a_name',
        help = '(str) : the name of the column to use as value in the output raster. default to the first one',
required = False,
type = str
)
parser.add_argument(
'--no-v',
dest = 'verbose',
action='store_false',
required = False,
help = "remove the verbose option"
)
# read arguments
args = parser.parse_args()
# launch the function
rasterize(**vars(args))
```
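A minimal usage sketch of the function above, assuming `parcels.shp` exists and carries an integer `class` column:
```python
from sgt.sgt_rasterize import rasterize

rasterize(
    src_vector="parcels.shp",
    out_rst="parcels.tif",
    res=30,  # metres, converted internally to degrees (EPSG:4326)
    column="class",
    verbose=True,
)
```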
#### File: sepal_geospatial_toolkit/sgt/utils.py
```python
def custom_print(verbose=False):
"""return a print function that does nothing if the verbose parameter is set to false and everything if true"""
if verbose:
# print the message
def v_print(msg):
print(msg)
else:
# do nothing function
v_print = lambda msg: None
return v_print
``` |
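Usage sketch: build the printer once from the flag, then call it unconditionally:
```python
from sgt.utils import custom_print  # assumed import path

v_print = custom_print(verbose=True)
v_print("processing...")  # printed

v_print = custom_print(verbose=False)
v_print("processing...")  # silently dropped
```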
{
"source": "12rambau/sepal_translator",
"score": 3
} |
#### File: component/tile/translator_tile.py
```python
from pathlib import Path
from shutil import copyfile
from sepal_ui import sepalwidgets as sw
from traitlets import Unicode
class TranslatorTile(sw.Tile):
def __init__(self, user_folder):
# gather the info
self.folder = Path(user_folder).expanduser()
# set the available locales
self.locales = self._get_locales()
# initialized the draft files (if needed)
self._init_drafts()
# create the actual tile
super().__init__(
'translator_tile',
"Translate module",
inputs = [],
output = sw.Alert(),
btn = sw.Btn("validate translation")
)
def _get_locales(self):
"""read the folder searching for json files and get the available languages of the app"""
locales = [f.stem for f in self.folder.glob('*.json')]
# check that at least english exist
if not ('en' in locales):
raise Exception("You don't have a dict for the source language (\"en\")")
# check that there is at least one target
if len(locales) < 2:
raise Exception("You don't have any target language")
return locales
def _init_drafts(self):
"""init the draft files if needed, we'll use the already existing one if they exist"""
# we don't create drafts fo en.json as it is the reference
locales = [l for l in self.locales if l != 'en']
# create a draft file if needed
for l in locales:
valid = self.folder.joinpath(f'{l}.json')
draft = valid.with_suffix(valid.suffix + '.draft')
if not draft.is_file():
copyfile(valid, draft)
return self
``` |
{
"source": "12rambau/sepal_ui",
"score": 2
} |
#### File: sepal_ui/reclassify/reclassify_view.py
```python
from pathlib import Path
from traitlets import Unicode
import ipyvuetify as v
import pandas as pd
from .parameters import NO_VALUE, MATRIX_NAMES
import sepal_ui.sepalwidgets as sw
from sepal_ui.scripts import utils as su
from sepal_ui.message import ms
from sepal_ui.scripts.utils import loading_button
from .reclassify_model import ReclassifyModel
__all__ = ["ReclassifyView"]
class ImportMatrixDialog(v.Dialog):
"""
Dialog to select the file to use and fill the matrix
Args:
folder (pathlike object): the path to the saved classifications
Attributes:
file (str): the file to use
"""
file = Unicode("").tag(sync=True)
def __init__(self, folder, **kwargs):
# create the 3 widgets
title = v.CardTitle(children=["Load reclassification matrix"])
self.w_file = sw.FileInput(label="filename", folder=folder)
self.load_btn = sw.Btn("Load")
cancel = sw.Btn("Cancel", outlined=True)
actions = v.CardActions(children=[cancel, self.load_btn])
# default params
self.value = False
self.max_width = 500
self.overlay_opacity = 0.7
self.persistent = True
self.children = [v.Card(class_="pa-4", children=[title, self.w_file, actions])]
# create the dialog
super().__init__(**kwargs)
# js behaviour
cancel.on_event("click", self._cancel)
def _cancel(self, widget, event, data):
"""exit and do nothing"""
self.value = False
return self
def show(self):
self.value = True
return self
class SaveMatrixDialog(v.Dialog):
"""
Dialog to setup the name of the output matrix file
Args:
folder (pathlike object): the path to the save folder. default to ~/
"""
def __init__(self, folder=Path.home(), **kwargs):
# save the matrix
self._matrix = {}
self.folder = Path(folder)
# create the widgets
title = v.CardTitle(children=["Save matrix"])
self.w_file = v.TextField(label="filename", v_model=None)
btn = sw.Btn("Save matrix")
cancel = sw.Btn("Cancel", outlined=True)
actions = v.CardActions(children=[cancel, btn])
self.alert = sw.Alert(children=["Choose a name for the output"]).show()
# default parameters
self.value = False
self.max_width = 500
        self.overlay_opacity = 0.7
self.persistent = True
self.children = [
v.Card(class_="pa-4", children=[title, self.w_file, self.alert, actions])
]
# create the dialog
super().__init__(**kwargs)
# js behaviour
cancel.on_event("click", self._cancel)
btn.on_event("click", self._save)
self.w_file.on_event("blur", self._sanitize)
self.w_file.observe(self._store_info, "v_model")
def _store_info(self, change):
"""Display where will be the file written"""
new_val = change["new"]
out_file = self.folder / f"{su.normalize_str(new_val)}.csv"
msg = f"Your file will be saved as: {out_file}"
if not new_val:
msg = "Choose a name for the output"
self.alert.add_msg(msg)
def _cancel(self, widget, event, data):
"""do nothing and exit"""
self.w_file.v_model = None
self.value = False
return self
def _save(self, widget, event, data):
"""save the matrix in a specified file"""
file = self.folder / f"{su.normalize_str(self.w_file.v_model)}.csv"
matrix = pd.DataFrame.from_dict(self._matrix, orient="index").reset_index()
matrix.columns = MATRIX_NAMES
matrix.to_csv(file, index=False)
# hide the dialog
self.value = False
return self
def show(self, matrix):
"""show the dialog and set the matrix values"""
self._matrix = matrix
# Reset file name
self.w_file.v_model = ""
self.value = True
return self
def _sanitize(self, widget, event, data):
"""sanitize the used name when saving"""
if not self.w_file.v_model:
return self
self.w_file.v_model = su.normalize_str(self.w_file.v_model)
return self
class ClassSelect(sw.Select):
"""
Custom widget to pick the value of an original class in the new classification system
Args:
new_codes(dict): the dict of the new codes to use as items {code: (name, color)}
old_code (int): the original code of the class
"""
def __init__(self, new_codes, old_code, **kwargs):
# set default parameters
self.items = [
{"text": f"{code}: {item[0]}", "value": code}
for code, item in new_codes.items()
]
self.dense = True
self.multiple = False
self.chips = True
self._metadata = {"class": old_code}
self.v_model = None
self.clearable = True
# init the select
super().__init__(**kwargs)
class ReclassifyTable(sw.SimpleTable):
"""
Table to store the reclassifying information.
2 columns are integrated, the new class value and the values in the original input
One can select multiple classes to be reclassified in the new classification
Args:
model (ReclassifyModel): model embedding the traitlet dict to store the reclassifying matrix. keys: class value in dst, values: list of values in src.
dst_classes (dict|optional): a dictionary that represents the classes of the new classification table as {class_code: (class_name, class_color)}. class_code must be ints and class_name str.
src_classes (dict|optional): the list of existing values within the input file {class_code: (class_name, class_color)}
Attributes:
HEADERS (list): names of the column headers (from, to)
model (ReclassifyModel): the reclassifyModel object to manipulate the
input file and save parameters
"""
HEADERS = ms.rec.rec.headers
def __init__(self, model, dst_classes={}, src_classes={}, **kwargs):
# default parameters
self.dense = True
# create the table
super().__init__(**kwargs)
# save the model
self.model = model
# create the table elements
self._header = [
v.Html(
tag="tr",
children=[v.Html(tag="th", children=[h]) for h in self.HEADERS],
)
]
self.set_table(dst_classes, src_classes)
def set_table(self, dst_classes, src_classes):
"""
Rebuild the table content based on the provided destination and source classes
Args:
dst_classes (dict|optional): a dictionary that represents the classes of the new classification table as {class_code: (class_name, class_color)}. class_code must be ints and class_name str.
src_classes (dict|optional): the list of existing values within the input file {class_code: (class_name, class_color)}
Return:
self
"""
# reset the matrix
self.model.matrix = {code: 0 for code in src_classes.keys()}
# create the select list
# they need to observe each other to adapt the available class list dynamically
self.class_select_list = {
k: ClassSelect(dst_classes, k) for k in src_classes.keys()
}
rows = [
v.Html(
tag="tr",
children=[
v.Html(tag="td", children=[f"{code}: {item[0]}"]),
v.Html(tag="td", children=[self.class_select_list[code]]),
],
)
for code, item in src_classes.items()
]
# add an empty row at the end to make the table more visible when it's empty
rows += [
v.Html(
tag="tr",
children=[
v.Html(tag="td", children=[""]),
v.Html(
tag="td",
children=["" if len(dst_classes) else "No data available"],
),
],
)
]
self.children = [v.Html(tag="tbody", children=self._header + rows)]
# js behaviour
[
w.observe(self._update_matrix_values, "v_model")
for w in self.class_select_list.values()
]
return self
def _update_matrix_values(self, change):
"""Update the appropriate matrix value when a Combo select change"""
# get the code of the class in the src classification
code = change["owner"]._metadata["class"]
# bind it to classes in the dst classification
self.model.matrix[code] = change["new"] if change["new"] else 0
return self
class ReclassifyView(sw.Card):
"""
Stand-alone Card object allowing the user to reclassify an input file. The input can be of any type (vector or raster) and from any source (local or GEE).
The user needs to provide a destination classification file (table) in the following format: 3 headerless columns: 'code', 'desc', 'color'. Once all the old classes have been attributed to their new class, the file can be exported in the source format to local memory or GEE. The output is also saved in memory for further use in the app. It can be used as a tile in a sepal_ui app. The id_ of the tile is set to "reclassify_tile"
Args:
model (ReclassifyModel): the reclassify model to manipulate the
classification dataset. default to a new one
class_path (str,optional): Folder path containing already existing
classes. Default to ~/
out_path (str,optional): the folder to save the created classifications.
default to ~/downloads
gee (bool): whether or not to set :code:`gee` to True (use GEE assets as input). default to False
dst_class (str|pathlib.Path, optional): the file to be used as destination classification. for apps that require a specific code system the file can be set beforehand and the user won't have the opportunity to change it
default_class (dict|optional): the default classification systems to use, needs to point to existing systems: {name: absolute_path}
folder(str, optional): the init GEE asset folder where the asset selector should start looking (debugging purposes)
save (bool, optional): Whether to write/export the result or not.
enforce_aoi (bool, optional): whether or not an AOI should be set to allow the reclassification
"""
MAX_CLASS = 20
"int: the number of line in the table to trigger the display of an extra toolbar and alert"
model = None
"ReclassifyModel: the reclassify model to manipulate the classification dataset"
gee = None
"bool: either being linked to gee or not (use local file or GEE asset for the rest of the app)"
alert = None
"sw.Alert: the alert to display informations about computation"
title = None
"v.Cardtitle: the title of the card"
w_asset = None
"sw.AssetSelect: the widget to select an asset input"
w_raster = None
"sw.FileInput: the widget to select a file input"
w_image = None
"Widget: wraper of the input. linked to w_asset if gee=True, else to w_raster"
w_code = None
"int|str: widget to select the band/property used as init classification in the input file"
get_table_btn = None
"sw.Btn: the btn to load the data in the reclassification table"
w_dst_class_file = None
"sw.FileInput: widget to select the new classification system file (3 headless columns: 'code', 'desc', 'color')"
reclassify_table = None
"ReclassifyTable: the reclassification table populated via the previous widgets"
reclassify_btn = None
"sw.Btn: the btn to launch the reclassifying process"
def __init__(
self,
model=None,
class_path=Path.home(),
out_path=Path.home() / "downloads",
gee=False,
dst_class=None,
default_class={},
aoi_model=None,
save=True,
folder=None,
enforce_aoi=False,
**kwargs,
):
# create metadata to make it compatible with the framework app system
self._metadata = {"mount_id": "reclassify_tile"}
# init card parameters
self.class_ = "pa-5"
# create the object
super().__init__(**kwargs)
# set up a default model
self.model = model or ReclassifyModel(
gee=gee,
dst_dir=out_path,
aoi_model=aoi_model,
folder=folder,
save=save,
enforce_aoi=enforce_aoi,
)
if enforce_aoi != self.model.enforce_aoi:
raise Exception(
"Both reclassify_model.gee and reclassify_view parameters has to be equals."
+ f"Received {enforce_aoi} for reclassify_view and {self.model.enforce_aoi} for reclassify_model."
)
# set the folders
self.class_path = Path(class_path)
self.out_path = Path(out_path)
# save the gee binding
self.gee = gee
if gee:
su.init_ee()
# create an alert to display information to the user
self.alert = sw.Alert()
# set the title of the card
self.title = v.CardTitle(
children=[v.Html(tag="h2", children=[ms.rec.rec.title])]
)
# create the input widgets
self.w_input_title = v.Html(
tag="h2", children=[ms.rec.rec.input.title], class_="mt-5"
)
if self.gee:
self.w_image = sw.AssetSelect(label=ms.rec.rec.input.asset, folder=folder)
else:
self.w_image = sw.FileInput(
[".tif", ".vrt", ".tiff", ".geojson", ".shp"],
label=ms.rec.rec.input.file,
)
self.w_code = v.Select(
label=ms.rec.rec.input.band.label,
hint=ms.rec.rec.input.band.hint,
v_model=None,
items=[],
persistent_hint=True,
)
w_optional_title = v.Html(tag="h3", children=[ms.rec.rec.input.optional])
self.w_src_class_file = sw.FileInput(
[".csv"], label=ms.rec.rec.input.classif.label, folder=self.class_path
)
self.w_optional = v.ExpansionPanels(
class_="mt-5",
children=[
v.ExpansionPanel(
children=[
v.ExpansionPanelHeader(children=[w_optional_title]),
v.ExpansionPanelContent(children=[self.w_src_class_file]),
]
)
],
)
# create the destination class widgets
self.w_class_title = v.Html(
tag="h2", children=[ms.rec.rec.input.classif.title], class_="mt-5"
)
if not dst_class:
self.w_dst_class_file = sw.FileInput(
[".csv"], label=ms.rec.rec.input.classif.label, folder=self.class_path
).hide()
else:
# the widget still needs to be created; build it hidden with the preset file
self.w_dst_class_file = sw.FileInput(
[".csv"], label=ms.rec.rec.input.classif.label, folder=self.class_path
)
self.w_dst_class_file.select_file(dst_class).hide()
self.btn_list = [
sw.Btn(
"Custom",
_metadata={"path": "custom"},
small=True,
class_="mr-2",
outlined=True,
)
] + [
sw.Btn(
f"use {name}",
_metadata={"path": path},
small=True,
class_="mr-2",
outlined=True,
)
for name, path in default_class.items()
]
self.w_default = v.Flex(class_="mt-5", children=self.btn_list)
# set the table and its toolbar
self.w_table_title = v.Html(
tag="h2", children=[ms.rec.rec.table], class_="mt-5"
)
self.save_dialog = SaveMatrixDialog(folder=out_path)
self.import_dialog = ImportMatrixDialog(folder=out_path)
self.get_table = sw.Btn(
ms.rec.rec.input.btn, "mdi-table", color="success", small=True
)
self.import_table = sw.Btn(
"import", "mdi-download", color="secondary", small=True, class_="ml-2 mr-2"
)
self.save_table = sw.Btn(
"save", "mdi-content-save", color="secondary", small=True
)
self.reclassify_btn = sw.Btn(
ms.rec.rec.btn, "mdi-checkerboard", small=True, disabled=True
)
self.toolbar = v.Toolbar(
class_="d-flex mb-6",
flat=True,
children=[
self.save_dialog,
self.import_dialog,
v.ToolbarTitle(children=["Actions"]),
v.Divider(class_="mx-4", inset=True, vertical=True),
self.get_table,
v.Divider(class_="mx-4", inset=True, vertical=True),
v.Flex(class_="ml-auto", children=[self.import_table, self.save_table]),
v.Divider(class_="mx-4", inset=True, vertical=True),
self.reclassify_btn,
],
)
self.reclassify_table = ReclassifyTable(self.model)
# create a duplicate layout that includes the alert and the different btns
# it will be displayed if the number of classes > MAX_CLASS
self.duplicate_layout = v.Layout(
class_="d-none", children=[self.toolbar, self.alert]
)
# bind to the model
# bind to either the raster or the asset entry as they cannot be displayed at the same time
self.model = (
self.model.bind(self.w_image, "src_gee" if self.gee else "src_local")
.bind(self.w_code, "band")
.bind(self.w_dst_class_file, "dst_class_file")
)
# create the layout
self.children = [
self.title,
self.w_input_title,
self.w_image,
self.w_code,
self.w_optional,
self.w_class_title,
self.w_default,
self.w_dst_class_file,
self.alert,
self.w_table_title,
self.toolbar,
self.reclassify_table,
self.duplicate_layout,
]
# Decorate functions
self.reclassify = loading_button(self.alert, self.reclassify_btn, debug=True)(
self.reclassify
)
self.get_reclassify_table = loading_button(
self.alert, self.get_table, debug=True
)(self.get_reclassify_table)
self.load_matrix_content = loading_button(
self.alert, self.import_table, debug=True
)(self.load_matrix_content)
# JS Events
self.import_table.on_event("click", lambda *args: self.import_dialog.show())
self.import_dialog.load_btn.on_event("click", self.load_matrix_content)
self.save_table.on_event(
"click", lambda *args: self.save_dialog.show(self.model.matrix)
)
self.w_image.observe(self._update_band, "v_model")
self.get_table.on_event("click", self.get_reclassify_table)
self.reclassify_btn.on_event("click", self.reclassify)
[btn.on_event("click", self._set_dst_class_file) for btn in self.btn_list]
def _set_dst_class_file(self, widget, event, data):
"""Set the destination classification according to the one selected with btn. alter the widgets properties to reflect this change"""
# get the filename
filename = widget._metadata["path"]
if filename == "custom":
self.w_dst_class_file.show()
else:
self.w_dst_class_file.hide()
self.w_dst_class_file.select_file(filename)
# change the visibility of the btns
for btn in self.btn_list:
btn.outlined = False if btn == widget else True
return self
def load_matrix_content(self, widget, event, data):
"""
Load the content of the file in the matrix. The table needs to be already set to perform this operation
Return:
self
"""
self.import_dialog.value = False
file = self.import_dialog.w_file.v_model
# exit if no files are selected
if not file:
raise Exception("No file has been selected")
# exit if no table is loaded
if not self.model.table_created:
raise Exception("You have to get the table before.")
# load the file
# sanity checks
input_data = pd.read_csv(file).fillna(NO_VALUE)
try:
input_data.astype("int64")
except Exception:
raise Exception(
"This file may contain non supported charaters for reclassification."
)
if len(input_data.columns) != 2:
# Try to identify the columns and subset them
if all([colname in list(input_data.columns) for colname in MATRIX_NAMES]):
input_data = input_data[MATRIX_NAMES]
else:
raise Exception(
"This file is not a properly formatted as classification matrix"
)
# check that the destination values are all available
widget = list(self.reclassify_table.class_select_list.values())[0]
classes = [i["value"] for i in widget.items]
if not all(v in classes for v in input_data.dst.unique()):
raise Exception(
"Some of the destination data are not existing in the destination dataset"
)
# fill the data
for _, row in input_data.iterrows():
src_code, dst_code = row.src, row.dst
if str(src_code) in self.reclassify_table.class_select_list:
self.reclassify_table.class_select_list[
str(src_code)
].v_model = dst_code
self.import_dialog.w_file.reset()
return self
def reclassify(self, widget, event, data):
"""
Reclassify the input and store it in the appropriate format.
The input is not saved locally to avoid memory overload.
Return:
self
"""
# create the output file
msg = self.model.reclassify()
# display a message to the user
self.alert.add_msg(msg, "success")
return self
@su.switch("loading", "disabled", on_widgets=["w_code"])
def _update_band(self, change):
"""Update the band possibility to the available bands/properties of the input"""
# guess the file type and save it in the model
self.model.get_type()
# update the bands values
self.w_code.v_model = None
self.w_code.items = self.model.get_bands()
return self
@su.switch("disabled", on_widgets=["reclassify_btn"], targets=[False])
@su.switch("table_created", on_widgets=["model"], targets=[True])
def get_reclassify_table(self, widget, event, data):
"""
Display a reclassify table which will lead the user to match
each source code to a target code based on a classes file
Return:
self
"""
# get the destination classes
self.model.dst_class = self.model.get_classes()
# get the src_classes
self.model.src_class = self.model.unique()
# if the src_class_file is set overwrite src_class:
if self.w_src_class_file.v_model:
self.model.src_class = self.model.get_classes()
# reset the table
self.reclassify_table.set_table(self.model.dst_class, self.model.src_class)
# check if the duplicate_layout needs to be displayed
self.duplicate_layout.class_ = "d-none"
if len(self.reclassify_table.children[0].children) - 1 > self.MAX_CLASS:
self.duplicate_layout.class_ = "d-block"
return self
def nest_tile(self):
"""
Prepare the view to be used as a nested component in a tile.
The elevation will be set to 0 and the title removed from the children.
The mount_id will also be changed to "nested_tile"
Return:
self
"""
# remove id
self._metadata["mount_id"] = "nested_tile"
# remove elevation
self.elevation = False
# remove title
without_title = self.children.copy()
without_title.remove(self.title)
self.children = without_title
return self
```
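A minimal usage sketch of the view above (hedged: it assumes a Jupyter/SEPAL environment with sepal_ui installed; the classification file path is hypothetical):
```python
from sepal_ui.reclassify import ReclassifyView

# local (non-GEE) reclassification; "ipcc.csv" is a hypothetical 3-column
# classification file (code, desc, color) exposed as a preset destination system
view = ReclassifyView(gee=False, default_class={"IPCC": "~/ipcc.csv"})
view  # displaying the card in a notebook renders the full widget
```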
#### File: sepal_ui/scripts/utils.py
```python
import os
from pathlib import Path
from urllib.parse import urlparse
import string
import random
import math
import re
import warnings
from unidecode import unidecode
from functools import wraps
from itertools import product
import ee
from cryptography.fernet import Fernet
from matplotlib import colors as c
import sepal_ui
from .warning import SepalWarning
def hide_component(widget):
"""
hide a vuetify based component
Args:
widget (v.VuetifyWidget): the widget to hide
"""
if isinstance(widget, sepal_ui.sepalwidgets.sepalwidget.SepalWidget):
widget.hide()
elif "d-none" not in str(widget.class_):
widget.class_ = str(widget.class_).strip() + " d-none"
return widget
def show_component(widget):
"""
show a vuetify based component
Args:
widget (v.VuetifyWidget): the widget to hide
"""
if isinstance(widget, sepal_ui.sepalwidgets.sepalwidget.SepalWidget):
widget.show()
elif "d-none" in str(widget.class_):
widget.class_ = widget.class_.replace("d-none", "")
return widget
def create_download_link(pathname):
"""
Create a clickable link to download the pathname target
Args:
pathname (str | pathlib.Path): the pathname to download
Return:
(str): the download link
"""
if isinstance(pathname, str):
pathname = Path(pathname)
result_path = Path(pathname).expanduser()
home_path = Path("~").expanduser()
# will be available with python 3.9
# download_path = result_path.relative_to(home_path) if result_path.is_relative_to(home_path) else result_path
download_path = os.path.relpath(result_path, home_path)
link = f"/api/files/download?path=/{download_path}"
return link
def is_absolute(url):
"""
Check if the given URL is an absolute or relative path
Args:
url (str): the URL to test
Return:
(bool): True if absolute else False
"""
return bool(urlparse(str(url)).netloc)
def random_string(string_length=3):
"""
Generates a random string of fixed length.
Args:
string_length (int, optional): Fixed length. Defaults to 3.
Return:
(str): A random string
"""
# random.seed(1001)
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(string_length))
def get_file_size(filename):
"""
Get the file size as a human-readable string in the adapted scale (B, KB, MB, ...)
Args:
filename (str | pathlib.Path): the path to the file to mesure
Return:
(str): the file size in a human-readable format
"""
file_size = Path(filename).stat().st_size
if file_size == 0:
return "0B"
size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
i = int(math.floor(math.log(file_size, 1024)))
s = file_size / (1024 ** i)
return "{:.1f} {}".format(s, size_name[i])
def init_ee():
"""
Initialize earth engine according to the environment.
It will use the credential file if the EE_DECRYPT_KEY env variable exists.
Otherwise it uses the simple Initialize command (asking the user to register if necessary)
"""
# only do the initialization if the credentials are missing
if not ee.data._credentials:
# if the decrypt key is available use the decrypt key
if "EE_DECRYPT_KEY" in os.environ:
# read the key as byte
key = os.environ["EE_DECRYPT_KEY"].encode()
# create the fernet object
fernet = Fernet(key)
# decrypt the key
json_encrypted = Path(__file__).parent / "encrypted_key.json"
with json_encrypted.open("rb") as f:
json_decrypted = fernet.decrypt(f.read()).decode()
# write it to a file
with open("ee_private_key.json", "w") as f:
f.write(json_decrypted)
# connection to the service account
service_account = "<EMAIL>"
credentials = ee.ServiceAccountCredentials(
service_account, "ee_private_key.json"
)
ee.Initialize(credentials)
# if in local env use the local user credential
else:
ee.Initialize()
return
def catch_errors(alert, debug=False):
"""
Decorator to execute try/except sentence
and catch errors in the alert message.
If debug is True then the error is raised anyway
Params:
alert (sw.Alert): Alert to display errors
debug (bool): Whether to raise the error or not, default to false
"""
def decorator_alert_error(func):
@wraps(func)
def wrapper_alert_error(*args, **kwargs):
value = None
try:
value = func(*args, **kwargs)
except Exception as e:
alert.add_msg(f"{e}", type_="error")
if debug:
raise e
return value
return wrapper_alert_error
return decorator_alert_error
def need_ee(func):
"""
Decorator to check if the object requires an EE binding.
Trigger an exception if the connection is not possible.
Params:
func (obj): the object on which the decorator is applied
"""
@wraps(func)
def wrapper_ee(*args, **kwargs):
# try to connect to ee
try:
init_ee()
except Exception:
raise Exception("This function needs an Earth Engine authentication")
return func(*args, **kwargs)
return wrapper_ee
def loading_button(alert=None, button=None, debug=False):
"""
Decorator to execute try/except sentence and toggle loading button object.
Designed to work within the Tile object, or any object that have a self.btn and self.alert set.
Params:
button (sw.Btn, optional): Toggled button
alert (sw.Alert, optional): the alert to display the error message
debug (bool, optional): whether or not the exception should stop the execution. default to False
"""
def decorator_loading(func):
@wraps(func)
def wrapper_loading(self, *args, **kwargs):
# set btn and alert
# Change name of variable to assign it again in this scope
button_ = self.btn if not button else button
alert_ = self.alert if not alert else alert
# Clean previous loaded messages in alert
alert_.reset()
button_.toggle_loading() # Start loading
value = None
try:
# Catch warnings in the process function
with warnings.catch_warnings(record=True) as w_list:
value = func(self, *args, **kwargs)
# Check if there are warnings in the function and append them
# Use append msg as several warnings could be triggered
if w_list:
# split the warning list
w_list_sepal = [
w for w in w_list if isinstance(w.message, SepalWarning)
]
# display the sepal one
ms_list = [
f"{w.category.__name__}: {w.message.args[0]}"
for w in w_list_sepal
]
[alert_.append_msg(ms, type_="warning") for ms in ms_list]
# only display them in the console if debug mode
if debug:
def custom_showwarning(w):
return warnings.showwarning(
message=w.message,
category=w.category,
filename=w.filename,
lineno=w.lineno,
line=w.line,
)
[custom_showwarning(w) for w in w_list]
except Exception as e:
alert_.add_msg(f"{e}", "error")
if debug:
button_.toggle_loading() # Stop loading button if there is an error
raise e
button_.toggle_loading() # Stop loading button
return value
return wrapper_loading
return decorator_loading
def normalize_str(msg, folder=True):
"""
Normalize a str to make it compatible with file naming (no spaces, special chars, etc.)
Params:
msg (str): the string to sanitise
folder (optional|bool): if the name will be used for folder naming or for display. if display, <'> and < > characters will be kept
Return:
(str): the modified str
"""
regex = "[^a-zA-Z\d\-\_]" if folder else "[^a-zA-Z\d\-\_\ ']"
return re.sub(regex, "_", unidecode(msg))
def to_colors(in_color, out_type="hex"):
"""
Transform any color type into a color in the specified output format
available format: hex
Args:
in_color (str or tuple): It can be a string (e.g., 'red', '#ffff00', 'ffff00') or RGB tuple (e.g., (255, 127, 0)).
out_type (str, optional): the type of the output color from ['hex']. default to 'hex'
Returns:
(str|tuple): The color in the specified format. default to black.
"""
# list of the color functions used for the translation
c_func = {"hex": c.to_hex}
transform = c_func[out_type]
out_color = "#000000" # default black color
if isinstance(in_color, tuple) and len(in_color) == 3:
# rescale color if necessary
if all(isinstance(item, int) for item in in_color):
in_color = [c / 255.0 for c in in_color]
return transform(in_color)
else:
# try to guess the color system
try:
return transform(in_color)
except Exception:
pass
# try again by adding an extra # (GEE handle hex codes without #)
try:
return transform(f"#{in_color}")
except Exception:
pass
return transform(out_color)
def switch(*params, debug=True, on_widgets=[], targets=[]):
"""
Decorator to switch the state of input boolean parameters on class widgets or the
class itself. If on_widgets is defined, it will switch the state of every widget
parameter, otherwise it will change the state of the class (self). You can also set
two decorators on the same function, one could affect the class and other the widgets.
Args:
*params (str): any boolean parameter of a SepalWidget.
debug (bool): Whether trigger or not an Exception if the decorated function fails.
on_widgets (list(widget_names,)|optional): List of widget names into the class
targets (list(bool,)|optional): list of the target values (the value that will be set on switch). default to the inverse of the current state.
"""
def decorator_switch(func):
@wraps(func)
def wrapper_switch(self, *args, **kwargs):
widgets_len = len(on_widgets)
targets_len = len(targets)
# sanity check on targets and on_widgets
if widgets_len and targets_len:
if widgets_len != targets_len:
raise IndexError(
f'the length of "on_widgets" ({widgets_len}) is different from the length of "targets" ({targets_len})'
)
# create the list of target values based on the target list
# or the initial values of the widgets params
# The first one is taken as reference
if not targets_len:
w = getattr(self, on_widgets[0]) if widgets_len else self
targets_ = [bool(getattr(w, p)) for p in params]
else:
targets_ = targets
if widgets_len:
# Verify that the input elements are strings
wrong_types = [
(w, type(w)) for w in on_widgets if not isinstance(w, str)
]
if len(wrong_types):
errors = [
f"Received:{w_type} for widget: {w}."
for w, w_type in wrong_types
]
raise TypeError(
f"All on_widgets list elements has to be strings. [{' '.join(errors)}]"
)
missing_widgets = [w for w in on_widgets if not hasattr(self, w)]
if missing_widgets:
raise Exception(
f"The provided {missing_widgets} widget(s) does not exist in the current class"
)
def w_assign(bool_targets):
params_targets = [
(p, bool_targets[i]) for i, p in enumerate(params)
]
for (w_name, p_t) in product(on_widgets, params_targets):
param, target = p_t
widget = getattr(self, w_name)
setattr(widget, param, target)
else:
def w_assign(bool_targets):
for i, p in enumerate(params):
setattr(self, p, bool_targets[i])
# assign the parameters to the inverse of the targets
w_assign([not t for t in targets_])
# execute the function and catch errors
try:
func(self, *args, **kwargs)
except Exception as e:
if debug:
w_assign(targets_)
raise e
# reassign the parameters to the targets
w_assign(targets_)
return wrapper_switch
return decorator_switch
def next_string(string):
"""Create a string followed by an underscore and a consecutive number"""
# if the string is already numbered, the last digit is separated from the rest of the string by an "_"
split = string.split("_")
end = split[-1]
if end.isdigit():
string = "_".join(split[:-1]) + f"_{int(end)+1}"
else:
string += "_1"
return string
```
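As a hedged illustration of how `loading_button` is meant to be wired (the `MyTile` class below is hypothetical; the alert and button are passed explicitly, mirroring how `ReclassifyView` decorates its own callbacks at instantiation time):
```python
from sepal_ui import sepalwidgets as sw
from sepal_ui.scripts.utils import loading_button

class MyTile:
    def __init__(self):
        self.btn = sw.Btn("run")
        self.alert = sw.Alert()
        # decorate the bound method so errors and warnings land in the alert
        self.process = loading_button(alert=self.alert, button=self.btn)(self.process)
        self.btn.on_event("click", self.process)

    def process(self, widget, event, data):
        # any exception raised here is displayed in self.alert while the
        # button loading state is toggled around the call
        raise Exception("shown in the alert instead of the console")
```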
#### File: sepal_ui/sepalwidgets/sepalwidget.py
```python
import ipyvuetify as v
from traitlets import Unicode, Bool, observe
__all__ = ["TYPES", "SepalWidget"]
TYPES = ("info", "secondary", "primary", "error", "warning", "success", "accent")
class SepalWidget(v.VuetifyWidget):
"""
Custom vuetifyWidget to add specific methods
"""
viz = Bool(True).tag(sync=True)
"Bool: whether the widget is displayed or not"
old_class = Unicode("").tag(sync=True)
"Unicode: a saving attribute of the widget class"
def __init__(self, **kwargs):
# remove viz from kwargs
# class_list needs to be set up before viz
# to let the hide and show functions run
viz = kwargs.pop("viz", True)
# init the widget
super().__init__(**kwargs)
# setup the viz status
self.viz = viz
@observe("viz")
def _set_viz(self, change):
"""
hide or show the component according to its viz param value.
Hide the widget by reducing the html class to :code:`d-none`.
Show the widget by removing the :code:`d-none` html class.
Save the previous class
Args:
change: the dict of a trait callback
"""
# will be replaced by direct calls to the built-in hide
# once the previous custom implementation is fully removed
if self.viz:
# change class value
self.class_ = self.old_class or self.class_
self.class_list.remove("d-none")
else:
# change class value
self.class_list.remove("d-none")
self.old_class = str(self.class_)
self.class_ = "d-none"
return
def toggle_viz(self):
"""
toggle the visibility of the widget.
Return:
self
"""
self.viz = not self.viz
return self
def hide(self):
"""
Hide the widget by reducing the html class to :code:`d-none`.
Save the previous class and set viz attribute to False.
Return:
self
"""
# update viz state
self.viz = False
return self
def show(self):
"""
Show the widget by removing the d-none html class.
Save the previous class and set viz attribute to True.
Return:
self
"""
# update viz state
self.viz = True
return self
def reset(self):
"""
Clear the widget v_model. Needs to be extended in custom widgets to fit the structure of the actual input.
Return:
self
"""
self.v_model = None
return self
```
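A hedged sketch of the resulting behaviour, assuming a widget class that mixes `SepalWidget` into an ipyvuetify component the way the sepalwidgets module does:
```python
import ipyvuetify as v
from sepal_ui.sepalwidgets.sepalwidget import SepalWidget

class Card(v.Card, SepalWidget):
    pass

card = Card(class_="ma-5")
card.hide()        # viz=False: class_ becomes "d-none" and "ma-5" is saved
card.show()        # viz=True: the saved "ma-5" class is restored
card.toggle_viz()  # flips back to hidden
```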
#### File: sepal_ui/translator/translator.py
```python
import json
from types import SimpleNamespace
from pathlib import Path
from collections import abc
from deepdiff import DeepDiff
class Translator(SimpleNamespace):
"""
The translator is a SimpleNamespace of SimpleNamespace. It reads 2 JSON files, the first one being the source language (usually English) and the second one the target language.
It will replace in the source dictionary every key that exists in both JSON dictionaries. Following this procedure, every message that is not translated can still be accessed in the source language.
To access the dictionary keys, instead of using [], you can simply use the key name as in an object, e.g.: translator.first_key.secondary_key.
There are no depth limits, just respect the snake_case convention when naming your keys in the .json files.
Args:
json_folder (str | pathlib.Path): the folder where the dictionaries are stored
target_lan (str): the language code of the target lang (it should be the same as the target dictionary)
default_lan (str): the language code of the source lang (it should be the same as the source dictionary)
"""
FORBIDDEN_KEYS = ["default_dict", "target_dict", "in", "class"]
"list(str): list of the forbidden keys, using one of them in a translation dict will throw an error"
target_dict = {}
"(dict): the target language dictionary"
default_dict = {}
"dict: the source language dictionary"
keys = None
"all the keys can be acceced as attributes"
def __init__(self, json_folder, target_lan, default_lan="en"):
super().__init__()
if isinstance(json_folder, str):
json_folder = Path(json_folder)
# read the default dict
source_path = json_folder / f"{default_lan}.json"
self.default_dict = json.loads(source_path.read_text())
# create a composite dict replacing all the default keys with the ones available in the target language
target_path = json_folder / f"{target_lan}.json"
self.target_dict = self.default_dict.copy()
if target_path.is_file():
self.target_dict = json.loads(target_path.read_text())
else:
print(f'No json file is provided for "{target_lan}", fallback to "en"')
# create the composite dictionary
ms_dict = self._update(self.default_dict, self.target_dict)
# verify that none of the forbidden keys is in use
[self.search_key(ms_dict, k) for k in self.FORBIDDEN_KEYS]
# transform it into a json str
ms_json = json.dumps(ms_dict)
# unpack the json as a simple namespace
ms = json.loads(ms_json, object_hook=lambda d: SimpleNamespace(**d))
for k, v in ms.__dict__.items():
setattr(self, k, getattr(ms, k))
@classmethod
def search_key(cls, d, key):
"""
Search a specific key in the d dictionary and raise an error if found
Args:
d (dict): the dictionary to study
key (str): the key to look for
"""
for k, v in d.items():
if isinstance(v, abc.Mapping):
cls.search_key(v, key)
else:
if k == key:
raise Exception(
f"You cannot use the key {key} in your translation dictionary"
)
return
def _update(self, d, u):
"""
Update the fallback dictionary (d) values with the keys that exist in the target (u) dictionary
Args:
d (dict): The fallback dictionary
u (dict): the target dictionary
Return:
ms (dict): The updated dictionary
"""
ms = d.copy()
for k, v in u.items():
if isinstance(v, abc.Mapping):
ms[k] = self._update(d.get(k, {}), v)
else:
ms[k] = v
return ms
def missing_keys(self):
"""
this function is intended for developer use only
print the list of the missing keys in the target dictionary
Return:
(str): the list of missing keys
"""
# find all the missing keys
try:
ddiff = DeepDiff(self.default_dict, self.target_dict)[
"dictionary_item_removed"
]
except Exception:
ddiff = ["All messages are translated"]
return "\n".join(ddiff)
```
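A hedged usage sketch (the `message/` folder and its two JSON files are hypothetical):
```python
from sepal_ui.translator import Translator

# message/en.json: {"app": {"title": "My application"}}
# message/fr.json: {"app": {"title": "Mon application"}}
ms = Translator("message", "fr")

ms.app.title       # -> "Mon application"
ms.missing_keys()  # lists the en.json keys not yet translated in fr.json
```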
#### File: sepal_ui/tests/test_AoiModel.py
```python
import ee
from urllib.request import urlretrieve
from zipfile import ZipFile
import pytest
from sepal_ui import aoi
class TestAoiModel:
def test_init(self, alert, gee_dir, asset_italy, fake_vector):
# default init
aoi_model = aoi.AoiModel(alert, folder=gee_dir)
assert isinstance(aoi_model, aoi.AoiModel)
assert aoi_model.ee is True
# with default assetId
aoi_model = aoi.AoiModel(alert, asset=asset_italy, folder=gee_dir)
assert aoi_model.asset_name["pathname"] == asset_italy
assert aoi_model.default_asset["pathname"] == asset_italy
assert all(aoi_model.gdf) is not None
assert aoi_model.feature_collection is not None
assert aoi_model.name == "italy"
# check that a wrongly defined asset_name raises errors
with pytest.raises(Exception):
aoi_model = aoi.AoiModel(alert, folder=gee_dir)
aoi_model._from_asset({"pathname": None})
with pytest.raises(Exception):
aoi_model = aoi.AoiModel(alert, folder=gee_dir)
aoi_model._from_asset(
{"pathname": asset_italy, "column": "ADM0_CODE", "value": None}
)
# it should be the same with a different name
aoi_model = aoi.AoiModel(alert, folder=gee_dir)
aoi_model._from_asset(
{"pathname": asset_italy, "column": "ADM0_CODE", "value": 122}
)
assert aoi_model.name == "italy_ADM0_CODE_122"
# with a default admin
admin = 85 # GAUL France
aoi_model = aoi.AoiModel(alert, admin=admin, folder=gee_dir)
assert aoi_model.name == "FRA"
# with a default vector
aoi_model = aoi.AoiModel(alert, vector=fake_vector, gee=False)
assert aoi_model.name == "gadm36_VAT_0"
# test with a non ee definition
admin = "FRA" # GADM France
aoi_model = aoi.AoiModel(alert, gee=False, admin=admin)
assert aoi_model.name == "FRA"
return
def test_get_columns(self, aoi_model_france):
# test data
test_data = [
"ADM0_CODE",
"ADM0_NAME",
"DISP_AREA",
"EXP0_YEAR",
"STATUS",
"STR0_YEAR",
"Shape_Leng",
]
res = aoi_model_france.get_columns()
assert res == test_data
return
def test_get_fields(self, aoi_model_france):
# init
column = "ADM0_CODE"
res = aoi_model_france.get_fields(column)
assert res == [85]
return
def test_get_selected(self, aoi_model_france, asset_france):
# init
ee_france = ee.FeatureCollection(asset_france)
# select the geometry associated with france (all of it)
column = "ADM0_CODE"
field = 85
feature = aoi_model_france.get_selected(column, field)
feature_geom = feature.geometry().getInfo()
france_geom = ee_france.geometry().getInfo()
assert feature_geom == france_geom
return
def test_clear_attributes(self, alert, gee_dir):
aoi_model = aoi.AoiModel(alert, folder=gee_dir)
dum = "dum"
# insert dum parameter everywhere
aoi_model.method = dum
aoi_model.point_json = dum
aoi_model.vector_json = dum
aoi_model.geo_json = dum
aoi_model.admin = dum
aoi_model.asset_name = dum
aoi_model.name = dum
aoi_model.gdf = dum
aoi_model.feature_collection = dum
aoi_model.ipygeojson = dum
# clear them
aoi_model.clear_attributes()
assert aoi_model.method is None
assert aoi_model.point_json is None
assert aoi_model.vector_json is None
assert aoi_model.geo_json is None
assert aoi_model.admin is None
assert aoi_model.asset_name is None
assert aoi_model.name is None
assert aoi_model.gdf is None
assert aoi_model.feature_collection is None
assert aoi_model.ipygeojson is None
assert aoi_model.default_asset is None
assert aoi_model.default_admin is None
assert aoi_model.default_vector is None
# check that default are saved
aoi_model = aoi.AoiModel(alert, admin=85, folder=gee_dir) # GAUL for France
# insert dummy args
aoi_model.method = dum
aoi_model.point_json = dum
aoi_model.vector_json = dum
aoi_model.geo_json = dum
aoi_model.admin = dum
aoi_model.asset_name = dum
aoi_model.name = dum
aoi_model.gdf = dum
aoi_model.feature_collection = dum
aoi_model.ipygeojson = dum
# clear
aoi_model.clear_attributes()
# assert that it's still france
assert aoi_model.name == "FRA"
return
def test_total_bounds(self, aoi_model_france):
# test data
expected_bounds = (
-5.142230921252722,
41.33878298628808,
9.561552263332496,
51.09281241936492,
)
bounds = aoi_model_france.total_bounds()
assert bounds == expected_bounds
return
@pytest.fixture
def fake_vector(self, tmp_dir):
"""create a fake vector file from the GADM definition of vatican city and save it in the tmp dir. the tmp files will be destroyed after the test."""
# download vatican city from GADM
file = tmp_dir / "test.zip"
gadm_vat_link = "https://biogeo.ucdavis.edu/data/gadm3.6/shp/gadm36_VAT_shp.zip"
name = "gadm36_VAT_0"
urlretrieve(gadm_vat_link, file)
with ZipFile(file, "r") as zip_ref:
zip_ref.extractall(tmp_dir)
file.unlink()
yield tmp_dir / f"{name}.shp"
# destroy the file after the test
[f.unlink() for f in tmp_dir.glob(f"{name}.*")]
return
@pytest.fixture
def aoi_model_france(self, alert, gee_dir, asset_france):
"""create a dummy alert and a test aoi model based on GEE that use the france asset available on the test account"""
return aoi.AoiModel(alert, asset=asset_france, folder=gee_dir)
```
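These tests rely on shared fixtures (`alert`, `tmp_dir`, `gee_dir`, `asset_france`, ...) defined in a `conftest.py` that is not part of this snippet; a minimal sketch of the local ones might look like this (hypothetical, and the GEE fixtures need a service account so they are omitted):
```python
# hypothetical conftest.py covering only the non-GEE fixtures used above
from pathlib import Path
import pytest
from sepal_ui import sepalwidgets as sw

@pytest.fixture
def alert():
    """a dummy alert widget passed to the models and views"""
    return sw.Alert()

@pytest.fixture
def tmp_dir():
    """a temporary working directory in the user home"""
    folder = Path.home() / "tmp_sepal_ui_test"
    folder.mkdir(exist_ok=True)
    return folder
```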
#### File: sepal_ui/tests/test_AoiView.py
```python
import pytest
from sepal_ui import aoi
from sepal_ui.mapping import SepalMap
from sepal_ui.message import ms
class TestAoiView:
def test_init(self, gee_dir):
# default init
view = aoi.AoiView(folder=gee_dir)
assert isinstance(view, aoi.AoiView)
# init without ee
view = aoi.AoiView(gee=False)
assert view.model.ee is False
# init with ADMIN
view = aoi.AoiView("ADMIN", folder=gee_dir)
assert {"header": "CUSTOM"} not in view.w_method.items
# init with CUSTOM
view = aoi.AoiView("CUSTOM", folder=gee_dir)
assert {"header": "ADMIN"} not in view.w_method.items
# init with a list
view = aoi.AoiView(["POINTS"], folder=gee_dir)
assert {"text": ms.aoi_sel.points, "value": "POINTS"} in view.w_method.items
assert len(view.w_method.items) == 1 + 1 # 1 for the header, 1 for the object
# init with a remove list
view = aoi.AoiView(["-POINTS"], folder=gee_dir)
assert {"text": ms.aoi_sel.points, "value": "POINTS"} not in view.w_method.items
assert (
len(view.w_method.items) == len(aoi.AoiModel.METHODS) + 2 - 1
) # 2 headers this time
# init with a mix of both
with pytest.raises(Exception):
view = aoi.AoiView(["-POINTS", "DRAW"], folder=gee_dir)
# init with a non existing keyword
with pytest.raises(Exception):
view = aoi.AoiView(["TOTO"], folder=gee_dir)
# init with a map
m = SepalMap(dc=True)
view = aoi.AoiView(map_=m, folder=gee_dir)
assert view.map_ == m
return
def test_admin(self, gee_dir):
# test if admin0 is in Gaul
view = aoi.AoiView(folder=gee_dir)
first_gaul_item = {"text": "Abyei", "value": 102}
assert first_gaul_item == view.w_admin_0.items[0]
# test if admin0 is in gadm
view = aoi.AoiView(gee=False)
first_gadm_item = {"text": "Afghanistan", "value": "AFG"}
assert first_gadm_item == view.w_admin_0.items[0]
return
def test_activate(self, aoi_gee_view):
view = aoi_gee_view
for method in aoi.AoiModel.METHODS:
view.w_method.v_model = method
for k, c in view.components.items():
if k == method:
assert "d-none" not in c.class_
elif hasattr(c, "parent"):
if view.components[k].parent == c:
assert "d-none" not in c.class_
else:
assert "d-none" in c.class_
# test the cascade of the admin selector
view.w_method.v_model = "ADMIN2"
view.w_admin_0.v_model = view.w_admin_0.items[0]["value"]
assert len(view.w_admin_1.items)
view.w_admin_1.v_model = view.w_admin_1.items[0]["value"]
assert len(view.w_admin_2.items)
return
def test_update_aoi(self, aoi_gee_view, aoi_local_view):
# select Italy
item = next(i for i in aoi_gee_view.w_admin_0.items if i["text"] == "Italy")
aoi_gee_view.w_method.v_model = "ADMIN0"
aoi_gee_view.w_admin_0.v_model = item["value"]
# launch the update
aoi_gee_view._update_aoi(None, None, None)
# perform checks
assert aoi_gee_view.updated == 1
assert aoi_gee_view.model.name == "ITA"
assert len(aoi_gee_view.map_.layers) == 2
# same without GEE
# select Italy
item = next(i for i in aoi_local_view.w_admin_0.items if i["text"] == "Italy")
aoi_local_view.w_method.v_model = "ADMIN0"
aoi_local_view.w_admin_0.v_model = item["value"]
# launch the update
aoi_local_view._update_aoi(None, None, None)
# perform checks
assert aoi_local_view.updated == 1
assert aoi_local_view.model.name == "ITA"
assert len(aoi_local_view.map_.layers) == 2
return
def test_reset(self, aoi_gee_view):
# select Italy
item = next(i for i in aoi_gee_view.w_admin_0.items if i["text"] == "Italy")
aoi_gee_view.w_method.v_model = "ADMIN0"
aoi_gee_view.w_admin_0.v_model = item["value"]
# launch the update
aoi_gee_view._update_aoi(None, None, None)
# reset
aoi_gee_view.reset()
# checks
assert len(aoi_gee_view.map_.layers) == 1
assert aoi_gee_view.w_method.v_model is None
assert aoi_gee_view.model.name is None
return
def test_polygonize(self):
src_json = {
"properties": {"style": {"radius": 1000}}, # 1 km
"geometry": {"coordinates": [0, 0]},
}
# number of sides in the polygons
# check this number instead of a regular output
# because different geopandas versions give different results (7th decimal)
# check the transformation
dst_json = aoi.AoiView.polygonize(src_json)
assert dst_json["geometry"]["type"] == "Polygon"
assert len(dst_json["geometry"]["coordinates"][0]) == 65
return
@pytest.fixture
def aoi_gee_view(self, gee_dir):
"""create an AoiView based on GEE with a silent sepalMap"""
m = SepalMap(dc=True)
return aoi.AoiView(map_=m, folder=gee_dir)
@pytest.fixture
def aoi_local_view(self, gee_dir):
"""create an AoiView based on GADM with a silent sepalMap"""
m = SepalMap(dc=True)
return aoi.AoiView(map_=m, gee=False)
```
#### File: sepal_ui/tests/test_DatePicker.py
```python
import pytest
from traitlets import Any
from sepal_ui import sepalwidgets as sw
from sepal_ui.model import Model
class TestDatePicker:
def test_init(self):
# default init
datepicker = sw.DatePicker()
assert isinstance(datepicker, sw.DatePicker)
# exhaustive
datepicker = sw.DatePicker("toto")
assert isinstance(datepicker, sw.DatePicker)
return
def test_bind(self, datepicker):
class Test_io(Model):
out = Any(None).tag(sync=True)
test_io = Test_io()
test_io.bind(datepicker, "out")
date = "2020-06-12"
datepicker.v_model = date
assert test_io.out == date
assert datepicker.menu.v_model is False
return
@pytest.fixture
def datepicker(self):
"""create a default datepicker"""
return sw.DatePicker()
```
#### File: sepal_ui/tests/test_LoadTableField.py
```python
import pandas as pd
import pytest
from sepal_ui import sepalwidgets as sw
class TestLoadTableField:
def test_init(self, load_table):
assert isinstance(load_table, sw.LoadTableField)
return
def test_on_file_input_change(self, load_table, fake_table, wrong_table):
# change the value of the file
load_table._on_file_input_change({"new": str(fake_table)})
test_data = {
"pathname": str(fake_table),
"id_column": "id",
"lng_column": "lng",
"lat_column": "lat",
}
assert load_table.v_model == test_data
# change for an empty update
load_table._on_file_input_change({"new": None})
assert load_table.v_model == load_table.default_v_model
# test if the csv does not have enough columns
load_table._on_file_input_change({"new": str(wrong_table)})
assert load_table.v_model == load_table.default_v_model
assert load_table.fileInput.selected_file.error_messages is not None
return
def test_reset(self, fake_table, load_table):
# change the value of the file
load_table._on_file_input_change({"new": str(fake_table)})
# reset the loadtable
load_table.reset()
# assert the current values
assert load_table.v_model == load_table.default_v_model
return
@pytest.fixture
def load_table(self):
"""create a default load table"""
return sw.LoadTableField()
@pytest.fixture
def fake_table(self, tmp_dir):
"""create a fake table"""
filename = tmp_dir / "test.csv"
end = 3
coloseo = [1, 41.89042582290999, 12.492241627092199]
fao = [2, 41.88369224629387, 12.489216069409004]
columns = ["id", "lat", "lng"]
df = pd.DataFrame([coloseo[:end], fao[:end]], columns=columns[:end])
df.to_csv(filename, index=False)
yield filename
# delete the file
filename.unlink()
return
@pytest.fixture
def wrong_table(self, tmp_dir):
"""create a wrongly defined table (with 2 columns instead of the minimal 3"""
filename = tmp_dir / "wrong_test.csv"
end = 2
coloseo = [1, 41.89042582290999, 12.492241627092199]
fao = [2, 41.88369224629387, 12.489216069409004]
columns = ["id", "lat", "lng"]
df = pd.DataFrame([coloseo[:end], fao[:end]], columns=columns[:end])
df.to_csv(filename, index=False)
yield filename
# delete the file
filename.unlink()
return
```
#### File: sepal_ui/tests/test_PasswordField.py
```python
import pytest
from sepal_ui import sepalwidgets as sw
class TestPasswordField:
def test_init(self, password):
assert isinstance(password, sw.PasswordField)
assert password.type == "password"
return
def test_toggle_viz(self, password):
# change the viz once
password._toggle_pwd(None, None, None)
assert password.type == "text"
assert password.append_icon == "mdi-eye"
# change it a second time
password._toggle_pwd(None, None, None)
assert password.type == "password"
assert password.append_icon == "mdi-eye-off"
return
@pytest.fixture
def password(self):
"""return a passwordfield"""
return sw.PasswordField()
```
#### File: sepal_ui/tests/test_ReclassifyView.py
```python
from pathlib import Path
from zipfile import ZipFile
import pytest
import geopandas as gpd
from sepal_ui.reclassify import ReclassifyView, ReclassifyModel
from sepal_ui import aoi
class TestReclassifyView:
def test_init_exception(self, alert, gee_dir):
"""Test exceptions"""
aoi_model = aoi.AoiModel(alert, gee=False)
# aoi_model has to be local when using local view.
with pytest.raises(Exception):
ReclassifyView(aoi_model=aoi_model, gee=True, folder=gee_dir)
return
def test_init_local(self, view_local, class_file):
assert view_local.model.aoi_model.ee is False
assert view_local.gee is False
# Check that all the classes buttons were created
btn_paths = [btn._metadata["path"] for btn in view_local.btn_list]
assert str(class_file) in btn_paths
assert "custom" in btn_paths
return
def test_init_gee(self, view_gee):
assert view_gee.model.aoi_model.ee is True
assert view_gee.gee is True
return
def test_set_dst_class_file(self, view_local, class_file):
# Arrange
btn_list = [btn for btn in view_local.btn_list if btn._metadata["path"]]
custom_btn, class_file_btn = btn_list
# Act
view_local._set_dst_class_file(class_file_btn, None, None)
# Assert outlined styles
for btn in view_local.btn_list:
if btn._metadata["path"] == str(class_file):
assert btn.outlined is False
else:
assert btn.outlined is True
# Assert select_file visibility
assert "d-none" in view_local.w_dst_class_file.class_
# select custom instead
view_local._set_dst_class_file(custom_btn, None, None)
# check that the w_dst_class_file is now visible
assert "d-none" not in view_local.w_dst_class_file.class_
return
def test_load_matrix_content(
self,
view_local,
map_file_bad_char,
map_file_bad_header,
map_file,
model_local_vector,
class_file,
):
# No file selected
view_local.import_dialog.w_file.v_model = ""
with pytest.raises(Exception):
view_local.load_matrix_content(None, None, None)
# Wrong characters in mapping file
with pytest.raises(Exception):
view_local.import_dialog.w_file.v_model = str(map_file_bad_char)
view_local.load_matrix_content(None, None, None)
# More than one column without headers
with pytest.raises(Exception):
view_local.import_dialog.w_file.v_model = str(map_file_bad_header)
view_local.load_matrix_content(None, None, None)
# When the table is not created before
view_local.import_dialog.w_file.v_model = str(map_file)
view_local.model.table_created = False
with pytest.raises(Exception):
view_local.load_matrix_content(None, None, None)
# Arrange
view_local.model = model_local_vector
view_local.model.dst_class_file = class_file
view_local.get_reclassify_table(None, None, None)
view_local.load_matrix_content(None, None, None)
return
def test_update_band(self, view_local, model_local_vector):
# Arrange
table_bands = ["BoroCode", "BoroName", "Shape_Area", "Shape_Leng"]
view_local.model = model_local_vector
# Act
view_local._update_band(None)
# Assert
assert view_local.w_code.items == table_bands
return
def test_reclassify(self, view_local, model_local_vector):
view_local.model = model_local_vector
view_local.reclassify(None, None, None)
matrix = {1: 6, 2: 7, 3: 8, 4: 9, 5: 10}
# Assert
assert view_local.model.dst_local is not None
reclassify_matrix = dict(
zip(
view_local.model.dst_local_memory["BoroCode"].to_list(),
view_local.model.dst_local_memory["reclass"].to_list(),
)
)
assert matrix == reclassify_matrix
return
@pytest.fixture
def view_local(self, tmp_dir, class_file, alert):
"""return a local reclassify view"""
aoi_model = aoi.AoiModel(alert, gee=False)
return ReclassifyView(
aoi_model=aoi_model,
gee=False,
out_path=tmp_dir,
class_path=tmp_dir,
default_class={"IPCC": str(class_file)},
)
@pytest.fixture
def view_gee(self, tmp_dir, class_file, gee_dir, alert):
"""return a gee reclassify view"""
aoi_model = aoi.AoiModel(alert, gee=True, folder=gee_dir)
return ReclassifyView(
aoi_model=aoi_model,
gee=True,
folder=gee_dir,
out_path=tmp_dir,
class_path=tmp_dir,
default_class={"IPCC": str(class_file)},
)
@pytest.fixture
def class_file(self, tmp_dir):
file = tmp_dir / "dum_default_classes.csv"
file.write_text(
"1,Forest,#044D02\n"
"2,Grassland,#F5FF00\n"
"3,Cropland,#FF8100\n"
"4,Wetland,#0013FF\n"
"5,Settlement,#FFFFFF\n"
"6,Other land,#FF00DE\n"
)
yield file
file.unlink()
return
@pytest.fixture
def map_file_bad_char(self, tmp_dir):
bad_file = tmp_dir / "map_file_bad_char.csv"
bad_file.write_text(",src,dst\nnot_valid,not_valid")
yield bad_file
bad_file.unlink()
return
@pytest.fixture
def map_file_bad_header(self, tmp_dir):
bad_file = tmp_dir / "map_file_bad_header.csv"
bad_file.write_text(",xx,yy,zz\n,1,2,3")
yield bad_file
bad_file.unlink()
return
@pytest.fixture
def map_file(self, tmp_dir):
file = tmp_dir / "map_file.csv"
file.write_text(
",src,dst\n0,10,1\n1,100,1\n2,11,2\n"
"3,110,2\n4,12,3\n5,120,3\n6,130,3\n"
"7,150,3\n8,160,3\n9,170,4\n10,180,4\n"
"11,190,4\n12,200,4\n13,210,5\n14,30,5\n"
"15,40,1\n16,50,1\n17,61,1\n18,90,6\n"
)
yield file
file.unlink()
return
@pytest.fixture
def model_local_vector(self, tmp_dir, alert):
aoi_model = aoi.AoiModel(alert, gee=False)
# create the vector file
file = Path(gpd.datasets.get_path("nybb").replace("zip:", ""))
with ZipFile(file, "r") as zip_ref:
zip_ref.extractall(tmp_dir)
model_local = ReclassifyModel(gee=False, dst_dir=tmp_dir, aoi_model=aoi_model)
model_local.src_local = tmp_dir / "nybb.shp"
model_local.get_type()
model_local.matrix = {1: 6, 2: 7, 3: 8, 4: 9, 5: 10}
model_local.band = "BoroCode"
yield model_local
# delete the shp files
[f.unlink() for f in tmp_dir.glob("nybb.*")]
return
```
#### File: sepal_ui/tests/test_sepalwidgets.py
```python
import ipyvuetify as v
from sepal_ui import sepalwidgets as sw
class TestSepalWidgets:
def test_generated(self):
"""test that all the vuetify classes have been overwritten"""
# get all the classes names
v_classes = [c for c in dir(v.generated) if c.startswith("__") is False]
v_classes = [c for c in v_classes if c != "VuetifyWidget"]
# set a class option
option = "ma-5"
for c in v_classes:
if c in ["Alert", "Tooltip"]:
# they are meant to be hidden by default
# they are specific sepalwidgets and tested elsewhere
continue
# test normal creation
w = getattr(sw, c)(class_=option)
assert w.viz is True
assert w.class_ == option
w.viz = False
assert w.class_ == "d-none"
assert w.viz is False
assert w.old_class == option
# test with extra sepalwidgets args
w = getattr(sw, c)(class_=option, viz=False)
assert w.class_ == "d-none"
assert w.viz is False
assert w.old_class == option
return
def test_html(self):
"""test a HTML class"""
# set a class option
option = "ma-5"
w = sw.Html(tag="H1", children=["toto"], class_=option, viz=False)
assert w.class_ == "d-none"
assert w.viz is False
assert w.old_class == option
return
```
#### File: sepal_ui/tests/test_TableView.py
```python
from sepal_ui import reclassify as rec
class TestTableView:
def test_init(self):
# default init
view = rec.TableView()
assert isinstance(view, rec.TableView)
return
def test_get_class(self):
return
def test_nest_tile(self):
# nest the tile
view = rec.TableView()
res = view.nest_tile()
assert res == view
assert view._metadata["mount_id"] == "nested_tile"
assert view.elevation == 0
assert len(view.children[0].children) == 1
return
``` |
{
"source": "12rambau/sphinx-icon",
"score": 3
} |
#### File: sphinxcontrib/icon/icon.py
```python
from pathlib import Path
import re
from docutils import nodes
from .font_handler import Fontawesome
from sphinx.util import logging
# -- global variables ----------------------------------------------------------
font_handler = None
logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class icon(nodes.General, nodes.Element):
pass
def download_font_assets(app):
"""
Download the fonts from the web assets and prepare them to be used in the documentation output directory
:param app: the current Sphinx application
"""
# start the font_handler
font_handler = Fontawesome()
# create a _font folder
output_dir = Path(app.outdir)
font_dir = output_dir / "_font"
font_dir.mkdir(exist_ok=True)
app.config.html_static_path.append(str(font_dir))
# guess what needs to be installed
# based on the builder format
if app.builder.format == "html":
font_handler.download_asset("html", font_dir)
app.add_css_file(font_handler.get_css())
app.add_js_file(font_handler.get_js())
elif app.builder.format == "latex":
font_handler.download_asset("latex", font_dir)
return
def get_glyph(text):
"""
get the glyph from text
Return a tuple of (font, glyph) from the provided text. Raise an error if the icon name is invalid.
:param text: The text to transform (e.g. "fa fa-folder")
"""
# split the icon name to find the name inside
m = re.match(r"^(fab|far|fa|fas) fa-([\w-]+)$", text)
if not m:
raise ValueError(f'invalid icon name: "{text}"')
# if not m.group(2) in font_handler.get_metadata():
# raise ValueError(f'icon "{m.group(2)}" is not part of fontawesome 5.15.4')
# return (font, glyph)
return m.group(1), m.group(2)
def depart_icon_node(self, node):
"""
Empty depart function, everything is handled in visit
"""
pass
def visit_icon_node_html(self, node):
"""
create the html output
"""
try:
font, glyph = get_glyph(node["icon"])
except ValueError as e:
logger.warning(str(e), location=node)
raise nodes.SkipNode
self.body.append(f'<i class="{font} fa-{glyph}"></i>')
return
def visit_icon_node_latex(self, node):
"""create the latex output"""
try:
font, glyph = get_glyph(node["icon"])
except ValueError as e:
logger.warning(str(e), location=node)
raise nodes.SkipNode
# detect the font
font_list = {"fa": None, "far": "regular", "fas": "solid", "fab": "brand"}
font = font_list[font]
# install fontawesome 5 package
# TODO install it on the fly using the otf files downloaded in var
package = "\\usepackage{fontawesome5}"
if package not in self.elements["preamble"]:
self.elements["preamble"] += f"{package}\n"
# build the output
cmd = "\\faIcon"
if font is not None:
cmd += f"[{font}]"
cmd += f"{{{glyph}}}"
self.body.append(cmd)
return
def visit_icon_node_unsuported(self, node):
"""raise error when the requested output is not supported"""
logger.warning("Unsupported output format (node skipped)")
raise nodes.SkipNode
_NODE_VISITORS = {
"html": (visit_icon_node_html, depart_icon_node),
"latex": (visit_icon_node_latex, depart_icon_node),
"man": (visit_icon_node_unsuported, None),
"texinfo": (visit_icon_node_unsuported, None),
"text": (visit_icon_node_unsuported, None),
}
def icon_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""
add inline icons
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked with the role.
:param lineno: The line number where rawtext appears in the input.
:param inliner: The inliner instance that called us.
:param options: Directive options for customization.
:param content: The directive content for customization.
"""
# create the node
node = icon(icon=text)
return [node], []
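# A minimal sketch of the extension entry point this module would expose.
# Sphinx looks for a function named `setup`, but the exact event wiring shown
# here is an assumption, not this file's confirmed implementation.
def setup(app):
    app.add_node(icon, **_NODE_VISITORS)  # register visit/depart per builder
    app.add_role("icon", icon_role)  # makes :icon:`fab fa-github` available
    app.connect("builder-inited", download_font_assets)  # fetch assets early
    return {"parallel_read_safe": True, "parallel_write_safe": True}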
```
{
"source": "12remember/qrl-analytics",
"score": 2
}
#### File: qrl_scraper/qrlNetwork/pipelines.py
```python
import psycopg2
import psycopg2.extras
import logging
import traceback
import sched, time
import os
import sys
import json
from scrapy import signals
from datetime import datetime
from django.utils import timezone
from psycopg2.extensions import AsIs
from psycopg2.extras import LoggingConnection, LoggingCursor
#from scrapy.conf import settings
from scrapy.exceptions import DropItem
from .items import QRLNetworkBlockItem, QRLNetworkTransactionItem, QRLNetworkAddressItem, QRLNetworkMissedItem
from .settings import connection, cur, scrap_url
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
DOCUMENT_DIR = os.path.join(PROJECT_ROOT, 'Documenten')
class QrlnetworkPipeline_block:
def open_spider(self, spider):
# rebind the shared module-level cursor; without `global`, this assignment
# would only create an unused local while process_item keeps the imported `cur`
global cur
cur = connection.cursor()
def close_spider(self, spider):
cur.close()
connection.close()
def process_item(self, item, spider):
if not isinstance(item, QRLNetworkBlockItem):
return item
valid = True
for data in item:
if not data:
valid = False
logging.error('Missing data in block: %s', data)
raise DropItem("Missing data in block {0}!".format(data))
if valid:
try:
datetimeNow = datetime.now()
cur.execute('SELECT "block_number" FROM public."qrl_blockchain_blocks" WHERE "block_number" = %s', (int(item['block_number']),))
dup_check = len(cur.fetchall())
if dup_check == 0:
convert_timestamp_to_datetime = datetime.fromtimestamp(int(item["block_found_datetime"])).strftime("%Y-%m-%d %H:%M:%S")
cur.execute('INSERT INTO public. "qrl_blockchain_blocks" (\
"block_number", "block_found", "block_result",\
"block_found_datetime", "block_found_timestamp_seconds", "block_reward_block", "block_reward_fee",\
"block_mining_nonce", "block_number_of_transactions","spider_name",\
"spider_version", "block_size", "block_hash_header_type" , "block_hash_header_data",\
"block_hash_header_type_prev" , "block_hash_header_data_prev", "block_merkle_root_type",\
"block_merkle_root_data", "block_added_timestamp"\
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s, %s, %s, %s, %s, %s ,%s , %s)',
(int(item['block_number']), item['block_found'],item['block_result'],
convert_timestamp_to_datetime, item['block_found_timestamp_seconds'], int(item["block_reward_block"]), int(item["block_reward_fee"]),
int(item["block_mining_nonce"]), int(item["block_number_of_transactions"]), item["spider_name"],
item["spider_version"], int(item["block_size"]), item["block_hash_header_type"],
item["block_hash_header_data"],item["block_hash_header_type_prev"],item["block_hash_header_data_prev"],
item["block_merkle_root_type"], item["block_merkle_root_data"], datetimeNow ))
connection.commit()
logging.warning('Got new block, number: %s ' % item['block_number'])
else:
raise DropItem("Already Got Blocknumber: %s" % item['block_number'])
except DropItem as duplicate:
logging.info(duplicate)
except (Exception, psycopg2.Error) as error:
spider_name = spider.name
spider_version = spider.version
location_script_file = str(__name__)
location_script_function = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
trace_back = traceback.format_exc(limit=None, chain=True)
error_type = str(type(error))
error = str(error)
item_url = item["item_url"]
spiderError(spider_name, spider_version, location_script_file, location_script_function, trace_back, error_type, error, item_url)
connection.rollback()
return item
class QrlnetworkPipeline_transaction:
def open_spider(self, spider):
global cur  # rebind the shared module-level cursor (see QrlnetworkPipeline_block)
cur = connection.cursor()
def close_spider(self, spider):
cur.close()
connection.close()
def process_item(self, item, spider):
if not isinstance(item, QRLNetworkTransactionItem):
return item
valid = True
for data in item:
if not data:
valid = False
logging.error('Missing data in transaction: %s', data)
raise DropItem("Missing data in transaction {0}!".format(data))
if valid:
try:
datetimeNow = datetime.now()
cur.execute('SELECT "transaction_hash" FROM public."qrl_blockchain_transactions" WHERE "transaction_hash" = %s AND "transaction_receiving_wallet_address" = %s', (item['transaction_hash'], item['transaction_receiving_wallet_address']))
dup_check = len(cur.fetchall())
if dup_check == 0:
convert_timestamp_to_datetime = datetime.fromtimestamp(int(item["block_found_datetime"])).strftime("%Y-%m-%d %H:%M:%S")
cur.execute('INSERT INTO public. "qrl_blockchain_transactions" (\
"transaction_hash", "transaction_sending_wallet_address", "transaction_receiving_wallet_address",\
"transaction_amount_send", "transaction_type", "transaction_block_number",\
"transaction_found", "transaction_result","spider_name", \
"spider_version" , "master_addr_type", "master_addr_data",\
"master_addr_fee","public_key_type","public_key_data",\
"signature_type","signature_data", "transaction_nonce",\
"transaction_addrs_to_type", "block_found_datetime",\
"transaction_added_datetime" \
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
(item['transaction_hash'], item['transaction_sending_wallet_address'],item['transaction_receiving_wallet_address'],
int(item["transaction_amount_send"]), item["transaction_type"], int(item["transaction_block_number"]),
item["transaction_found"],item["transaction_result"], item["spider_name"],
item["spider_version"],item["master_addr_type"],item["master_addr_data"],
item["master_addr_fee"], item["public_key_type"], item["public_key_data"],
item["signature_type"], item["signature_data"], item["transaction_nonce"],
item["transaction_addrs_to_type"], convert_timestamp_to_datetime, datetimeNow ))
connection.commit()
logging.warning('Got new transaction, hash: %s ' % item['transaction_hash'])
else:
raise DropItem("Already Got Transaction: %s" % item['transaction_hash'])
except DropItem as duplicate:
logging.info(duplicate)
except (Exception, psycopg2.Error) as error:
connection.rollback()
spider_name = spider.name
spider_version = spider.version
location_script_file = str(__name__)
location_script_function = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
trace_back = traceback.format_exc()
error_type = str(type(error))
error = str(error)
item_url = item["item_url"]
spiderError(spider_name, spider_version, location_script_file, location_script_function, trace_back, error_type, error, item_url)
return item
class QrlnetworkPipeline_address:
def open_spider(self, spider):
global cur  # rebind the shared module-level cursor (see QrlnetworkPipeline_block)
cur = connection.cursor()
def close_spider(self, spider):
cur.close()
connection.close()
def process_item(self, item, spider):
if not isinstance(item, QRLNetworkAddressItem):
return item
valid = True
for data in item:
if not data:
valid = False
logging.error('Missing data in address: %s', data)
raise DropItem("Missing {0}!".format(data))
if valid:
try:
datetimeNow = datetime.now()
cur.execute('SELECT "wallet_address" FROM public."qrl_wallet_address" WHERE "wallet_address" = %s', (item['wallet_address'],))
dup_check = len(cur.fetchall())
if dup_check == 0:
cur.execute('INSERT INTO public. "qrl_wallet_address" (\
"wallet_address", "address_balance", "address_nonce",\
"address_ots_bitfield_used_page", "address_used_ots_key_count", "address_transaction_hash_count",\
"address_tokens_count", "address_slaves_count", "address_lattice_pk_count",\
"address_multi_sig_address_count", "address_multi_sig_spend_count","address_inbox_message_count",\
"address_foundation_multi_sig_spend_txn_hash", "address_foundation_multi_sig_vote_txn_hash", "address_unvotes",\
"address_proposal_vote_stats","spider_name", "spider_version", "address_added_datetime" \
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', (
item['wallet_address'], int(item['address_balance']),int(item['address_nonce']),
int(item["address_ots_bitfield_used_page"]), int(item["address_used_ots_key_count"]), int(item["address_transaction_hash_count"]),
int(item["address_tokens_count"]) , int(item["address_slaves_count"]), int(item["address_lattice_pk_count"]),
int(item["address_multi_sig_address_count"]), int(item["address_multi_sig_spend_count"]),int(item["address_inbox_message_count"]),
item["address_foundation_multi_sig_spend_txn_hash"],item["address_foundation_multi_sig_vote_txn_hash"],item["address_unvotes"],
item["address_proposal_vote_stats"], item["spider_name"],item["spider_version"], datetimeNow ))
connection.commit()
logging.warning('Got New Wallet Address: %s ' % item['wallet_address'])
else:
update_address = 'UPDATE public. "qrl_wallet_address" SET "address_balance" = %s, "address_nonce" = %s ,\
"address_ots_bitfield_used_page"= %s , "address_used_ots_key_count"= %s , "address_transaction_hash_count"= %s ,\
"address_tokens_count"= %s , "address_slaves_count"= %s , "address_lattice_pk_count"= %s ,\
"address_multi_sig_address_count"= %s , "address_multi_sig_spend_count"= %s ,"address_inbox_message_count"= %s ,\
"address_foundation_multi_sig_spend_txn_hash"= %s , "address_foundation_multi_sig_vote_txn_hash"= %s , "address_unvotes"= %s ,\
"address_proposal_vote_stats"= %s ,"spider_name"= %s , "spider_version"= %s WHERE "wallet_address" = %s'
cur.execute(update_address,(int(item['address_balance']), int(item["address_nonce"]), int(item["address_ots_bitfield_used_page"]),
int(item["address_used_ots_key_count"]), int(item["address_transaction_hash_count"]), int(item["address_tokens_count"]),
int(item["address_slaves_count"]), int(item["address_lattice_pk_count"]), int(item["address_multi_sig_address_count"]),
int(item["address_multi_sig_spend_count"]),int(item["address_inbox_message_count"]), item["address_foundation_multi_sig_spend_txn_hash"],
item["address_foundation_multi_sig_vote_txn_hash"],item["address_unvotes"],item["address_proposal_vote_stats"],
item["spider_name"],item["spider_version"],item["wallet_address"] ))
connection.commit()
logging.info('Updated Wallet Address: %s ' % item['wallet_address'])
except DropItem as duplicate:
logging.warning(duplicate)
except (Exception, psycopg2.Error) as error:
connection.rollback()
spider_name = spider.name
spider_version = spider.version
location_script_file = str(__name__)
location_script_function = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
trace_back = traceback.format_exc()
error_type = str(type(error))
error = str(error)
item_url = item["item_url"]
spiderError(spider_name, spider_version, location_script_file, location_script_function, trace_back, error_type, error, item_url)
return item
class QrlnetworkPipeline_missed_items:
def open_spider(self, spider):
global cur  # rebind the shared module-level cursor (see QrlnetworkPipeline_block)
cur = connection.cursor()
def close_spider(self, spider):
cur.close()
connection.close()
def process_item(self, item, spider):
if not isinstance(item, QRLNetworkMissedItem):
return item
valid = True
for data in item:
if not data:
valid = False
logging.error('Missing data in missed items: %s', data)
raise DropItem("Missing {0}!".format(data))
try:
if valid:
cur.execute('INSERT INTO public. "qrl_blockchain_missed_items" (\
"spider_name","spider_version", "location_script_file",\
"location_script_function", "trace_back", "error_type",\
"error_name", "item_url", "error_timestamp"\
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)', (
item["spider_name"], item["spider_version"], item["location_script_file"], item["location_script_function"], json.dumps(item["trace_back"]),
item["error_type"], item["error"], item["item_url"], datetime.now()))
connection.commit()
logging.warning('Got ERROR - check db')
return item
except (Exception, psycopg2.Error) as error:
connection.rollback()
spider_name = spider.name
spider_version = spider.version
location_script_file = str(__name__)
location_script_function = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
trace_back = traceback.format_exc()
error_type = str(type(error))
error = str(error)
item_url = item["item_url"]
spiderError(spider_name, spider_version, location_script_file, location_script_function, trace_back, error_type, error, item_url)
def spiderError(spider_name, spider_version, location_script_file, location_script_function, trace_back, error_type, error, item_url):
cur = connection.cursor()
try:
cur.execute('INSERT INTO public. "qrl_blockchain_missed_items" (\
"spider_name","spider_version", "location_script_file",\
"location_script_function", "trace_back", "error_type",\
"error_name", "item_url", "error_timestamp"\
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)', (
spider_name, spider_version, location_script_file,
location_script_function, json.dumps(trace_back), error_type, error, item_url, datetime.now()))
connection.commit()
logging.warning('Got ERROR - check db')
except (Exception,psycopg2.Error) as error:
connection.rollback()
location_script_file = str(__name__)
location_script_function = str(sys._getframe().f_code.co_name) + ', exception'
trace_back = traceback.format_exc()
error = str(error)
cur.execute('INSERT INTO public. "qrl_blockchain_missed_items" (\
"spider_name","spider_version", "location_script_file",\
"location_script_function", "trace_back", "error_type",\
"error_name", "item_url", "error_timestamp"\
) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)', (
'', '', location_script_file, location_script_function,
json.dumps(trace_back), 'Unknown Error', error, '', datetime.now()))
connection.commit()
logging.warning('Got Unknown ERROR - check db')
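# A minimal sketch of how these pipelines would be enabled in the project's
# Scrapy settings; the "qrlNetwork" module path is assumed from the file
# layout, and the numbers only fix the relative execution order.
# ITEM_PIPELINES = {
#     "qrlNetwork.pipelines.QrlnetworkPipeline_block": 100,
#     "qrlNetwork.pipelines.QrlnetworkPipeline_transaction": 200,
#     "qrlNetwork.pipelines.QrlnetworkPipeline_address": 300,
#     "qrlNetwork.pipelines.QrlnetworkPipeline_missed_items": 400,
# }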
```
#### File: qrlNetwork/spiders/qrl_network_spider.py
```python
import os
import scrapy
import logging
import re
import psycopg2
import json
import marshal
import numpy as np
import pandas as pd
import sys
import traceback
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError, TCPTimedOutError
from ..items import QRLNetworkBlockItem, QRLNetworkTransactionItem, QRLNetworkAddressItem, QRLNetworkMissedItem
from ..settings import connection, cur, scrap_url
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
DOCUMENT_DIR = os.path.join(PROJECT_ROOT, 'Documenten')
def list_integer_to_hex(list):
array = bytearray(list) # create byte array from list of integers
return bytearray.hex(array) # hex the byte array -> result tx hash, needed for api call
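# Example: list_integer_to_hex([0, 255, 16]) -> "00ff10"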
class QRLNetworkSpider(scrapy.Spider):
name = "qrl_network_spider"
version = "0.25"
start_urls = []
def start_requests(self):
self.crawler.stats.set_value("spiderName", self.name)
self.crawler.stats.set_value("spiderVersion", self.version)
yield scrapy.Request(
url='https://explorer.theqrl.org/api/blockheight',
callback=self.parse,
errback=self.errback_conn,
#meta={"item": item},
)
def parse(self, response):
json_response = json.loads(response.body)
cur.execute('SELECT "block_number" FROM public."qrl_blockchain_blocks" ORDER BY "block_number" DESC LIMIT 1')
block_in_database = cur.fetchone()
if block_in_database is not None:
last_block_scraped = int(block_in_database[0]) # check latest block in data base
else:
last_block_scraped = 0
diff_with_current_blockheight = abs(json_response["blockheight"] - last_block_scraped) # calculate difference between latest block and block in database
if json_response["found"] == True and diff_with_current_blockheight != 0 :
for number in range(last_block_scraped+1,json_response["blockheight"]+1 ): #last_block_scraped,json_response["blockheight"]+1
#cur.execute('SELECT "block_number" FROM public."qrl_blockchain_blocks" ORDER BY "block_number" ASC')
#listA = [item[0] for item in cur.fetchall()]
#res = [x for x in range(listA[0], listA[-1]+1) if x not in listA]
#print(res)
#for number in res:
#for number in range(1212497,1263495):
block_api_url = scrap_url + '/api/block/' + str(number)
yield scrapy.Request(
url=block_api_url,
callback=self.parse_block,
errback=self.errback_conn,
#meta={"item": item},
)
def parse_block(self, response):
item_block = QRLNetworkBlockItem()
json_response = json.loads(response.body)
item_block["item_url"] = response.url
try:
item_block["spider_name"] = self.name
item_block["spider_version"] = self.version
item_block["block_result"] = json_response["result"]
item_block["block_found"] = json_response["found"]
# block_extended
block_extended = json_response["block_extended"]
item_block["block_size"] = block_extended["size"]
# block_extended > header
block_extended_header = block_extended["header"]
# block_extended > header > hash_header
block_hash_header = block_extended_header["hash_header"]
item_block["block_hash_header_type"] = block_hash_header["type"]
item_block["block_hash_header_data"] = list_integer_to_hex(block_hash_header["data"])
# block_extended > header > hash_header_prev
block_hash_header_prev = block_extended_header["hash_header_prev"]
item_block["block_hash_header_type_prev"] = block_hash_header_prev["type"]
item_block["block_hash_header_data_prev"] = list_integer_to_hex(block_hash_header_prev["data"])
# block_extended > header > merkle_root
block_merkle_root= block_extended_header["merkle_root"]
item_block["block_merkle_root_type"] = block_merkle_root["type"]
item_block["block_merkle_root_data"] = list_integer_to_hex(block_merkle_root["data"])
item_block["block_number"] = block_extended_header["block_number"]
item_block["block_found_datetime"] = block_extended_header["timestamp_seconds"]
item_block["block_found_timestamp_seconds"] = block_extended_header["timestamp_seconds"]
item_block["block_reward_block"] = block_extended_header["reward_block"]
item_block["block_reward_fee"] = block_extended_header["reward_fee"]
item_block["block_mining_nonce"] = block_extended_header["mining_nonce"]
item_block["block_extra_nonce"] = block_extended_header["extra_nonce"]
item_block["block_number_of_transactions"] = len(block_extended["extended_transactions"])
if item_block["block_found"] == True:
yield QRLNetworkBlockItem(item_block)
for transaction in block_extended["extended_transactions"]:
transaction_tx = transaction["tx"]
transaction_tx_transaction_hash = transaction_tx["transaction_hash"]
tx_hash = list_integer_to_hex(transaction_tx_transaction_hash["data"])
# create api url
transaction_api_url = scrap_url + '/api/tx/' + str(tx_hash)
yield scrapy.Request(
url=transaction_api_url,
callback=self.parse_transaction,
errback=self.errback_conn,
meta={"item_block": item_block},
)
else:
print('Block Not Found Yet By The BlockChain')
pass
except (Exception) as error:
item_missed = QRLNetworkMissedItem()
item_missed["spider_name"] = self.name
item_missed["spider_version"] = self.version
item_missed["location_script_file"] = str(__name__)
item_missed["location_script_function"] = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
item_missed["trace_back"] = traceback.format_exc(limit=None, chain=True)
item_missed["error_type"] = str(type(error))
item_missed["error"] = str(error)
item_missed["item_url"] = response.url
yield QRLNetworkMissedItem(item_missed)
def parse_transaction(self, response):
item_block = response.meta['item_block']
item_transaction = QRLNetworkTransactionItem()
json_response = json.loads(response.body)
item_transaction["item_url"]=response.url
try:
transaction = json_response["transaction"]
item_transaction["spider_name"] = self.name
item_transaction["spider_version"] = self.version
item_transaction["transaction_result"] = json_response["result"]
item_transaction["transaction_found"] = json_response["found"]
# transaction > header
transaction_header = transaction["header"]
item_transaction["transaction_block_number"] = item_block["block_number"]
item_transaction["block_found_datetime"] = item_block["block_found_datetime"]
item_transaction["block_found_timestamp_seconds"] = item_block["block_found_timestamp_seconds"]
# transaction > tx
transaction_tx = transaction["tx"]
item_transaction["transaction_type"] = transaction_tx["transactionType"]
item_transaction["transaction_nonce"] = int(transaction_tx["nonce"])
item_transaction["master_addr_fee"] = int(transaction_tx["fee"])
# transaction > tx > master_addr
master_addr = transaction_tx["master_addr"]
item_transaction["master_addr_type"] = master_addr["type"]
item_transaction["master_addr_data"] = list_integer_to_hex(master_addr["data"])
# transaction > tx > public_key
public_key = transaction_tx["public_key"]
item_transaction["public_key_type"] = public_key["type"]
item_transaction["public_key_data"] = list_integer_to_hex(public_key["data"])
# transaction > tx > signature
signature = transaction_tx["signature"]
item_transaction["signature_type"] = signature["type"]
item_transaction["signature_data"] = list_integer_to_hex(signature["data"])
if transaction_tx["transactionType"] == "transfer":
transfer_list = []
transfer_type_list = []
transaction_tx_transfer = transaction_tx["transfer"]
amounts_list = transaction_tx_transfer["amounts"]
# transaction > tx > transfer > addrs_to
for single_transfer in transaction_tx_transfer["addrs_to"]:
transaction_addrs_to_type = single_transfer["type"]
transaction_receiving_wallet_address_hex = list_integer_to_hex(single_transfer["data"]) # get receiving address in hex
transfer_type_list.append(transaction_addrs_to_type)
transfer_list.append(transaction_receiving_wallet_address_hex)
transfer_address_amount_combined = list(zip(transfer_list, amounts_list,transfer_type_list))
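# e.g. zip(["<addr_a>", "<addr_b>"], [100, 250], [0, 0])
# -> [("<addr_a>", 100, 0), ("<addr_b>", 250, 0)]; one row per recipient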
# transaction > tx > transfer > amounts
for address_with_amount in transfer_address_amount_combined:
transaction_sending_wallet_address = transaction["addr_from"]
item_transaction["transaction_sending_wallet_address"] = "Q"+ list_integer_to_hex(transaction_sending_wallet_address["data"])
item_transaction["transaction_receiving_wallet_address"] = "Q" + address_with_amount[0]
item_transaction["transaction_amount_send"] = address_with_amount[1]
item_transaction["transaction_addrs_to_type"] = address_with_amount[2]
# transaction > tx > transaction_hash
transaction_tx_transaction_hash = transaction_tx["transaction_hash"]
item_transaction["transaction_hash"] = list_integer_to_hex(transaction_tx_transaction_hash["data"])
yield QRLNetworkTransactionItem(item_transaction)
for scrape_wallet_url in [item_transaction["transaction_receiving_wallet_address"], item_transaction["transaction_sending_wallet_address"]]:
yield scrapy.Request(
url= scrap_url + "/api/a/" + scrape_wallet_url,
callback=self.parse_address,
errback=self.errback_conn,
meta={"item_transaction": item_transaction,}
)
elif transaction_tx["transactionType"] == "coinbase":
# transaction > tx > coinbase
transaction_tx_coinbase = transaction_tx["coinbase"]
coinbase_transfer = transaction_tx_coinbase["addr_to"]
transaction_sending_wallet_address = transaction["addr_from"]
item_transaction["transaction_sending_wallet_address"] = "Q"+ list_integer_to_hex(transaction_sending_wallet_address["data"])
item_transaction["transaction_receiving_wallet_address"] = "Q" + list_integer_to_hex(coinbase_transfer["data"])
item_transaction["transaction_amount_send"] = transaction_tx_coinbase["amount"]
item_transaction["transaction_addrs_to_type"] = coinbase_transfer["type"]
# transaction > tx > transaction_hash
coinbase_tx_transaction_hash = transaction_tx["transaction_hash"]
item_transaction["transaction_hash"] = list_integer_to_hex(coinbase_tx_transaction_hash["data"])
yield QRLNetworkTransactionItem(item_transaction)
yield scrapy.Request(
url= scrap_url + "/api/a/" + item_transaction["transaction_receiving_wallet_address"],
callback=self.parse_address,
errback=self.errback_conn,
meta={"item_transaction": item_transaction,},
)
elif transaction_tx["transactionType"] == "slave" :
# transaction > tx > slave
transaction_tx_slave = transaction_tx["slave"]
for slave_pk in transaction_tx_slave["slave_pks"] :
transaction_sending_wallet_address = transaction["addr_from"]
item_transaction["transaction_sending_wallet_address"] = "Q"+ list_integer_to_hex(transaction_sending_wallet_address["data"])
item_transaction["transaction_receiving_wallet_address"] = "Q" + list_integer_to_hex(slave_pk["data"])
item_transaction["transaction_amount_send"] = 0 #address_with_access_types[1]
item_transaction["transaction_addrs_to_type"] = '' #address_with_access_types[2]
transaction_tx_transaction_hash = transaction_tx["transaction_hash"]
item_transaction["transaction_hash"] = list_integer_to_hex(transaction_tx_transaction_hash["data"])
yield QRLNetworkTransactionItem(item_transaction)
except (Exception) as error:
item_missed = QRLNetworkMissedItem()
item_missed["spider_name"] = self.name
item_missed["spider_version"] = self.version
item_missed["location_script_file"] = str(__name__)
item_missed["location_script_function"] = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
item_missed["trace_back"] = traceback.format_exc(limit=None, chain=True)
item_missed["error_type"] = str(type(error))
item_missed["error"] = str(error)
item_missed["item_url"] = response.url
yield QRLNetworkMissedItem(item_missed)
def parse_address(self, response):
item_transaction = response.meta['item_transaction']
item_address = QRLNetworkAddressItem()
json_response = json.loads(response.body)
item_address["item_url"] = response.url
try:
json_state = json_response["state"]
item_address["spider_name"] = self.name
item_address["spider_version"] = self.version
item_address["wallet_address"] = json_state["address"]
item_address["address_balance"] = json_state["balance"]
item_address["address_nonce"] = json_state["nonce"]
item_address["address_ots_bitfield_used_page"] = json_state["ots_bitfield_used_page"]
item_address["address_used_ots_key_count"] = json_state["used_ots_key_count"]
item_address["address_transaction_hash_count"] = json_state["transaction_hash_count"]
item_address["address_tokens_count"] = json_state["tokens_count"]
item_address["address_slaves_count"] = json_state["slaves_count"]
item_address["address_lattice_pk_count"] = json_state["lattice_pk_count"]
item_address["address_multi_sig_address_count"] = json_state["multi_sig_address_count"]
item_address["address_multi_sig_spend_count"] = json_state["multi_sig_spend_count"]
item_address["address_inbox_message_count"] = json_state["inbox_message_count"]
item_address["address_foundation_multi_sig_spend_txn_hash"] = json_state["foundation_multi_sig_spend_txn_hash"]
item_address["address_foundation_multi_sig_vote_txn_hash"] = json_state["foundation_multi_sig_vote_txn_hash"]
item_address["address_unvotes"] = json_state["unvotes"]
item_address["address_proposal_vote_stats"] = json_state["proposal_vote_stats"]
item_address["address_proposal_vote_stats"] = json_state["proposal_vote_stats"]
yield QRLNetworkAddressItem(item_address)
except (Exception) as error:
item_missed = QRLNetworkMissedItem()
item_missed["spider_name"] = self.name
item_missed["spider_version"] = self.version
item_missed["location_script_file"] = str(__name__)
item_missed["location_script_function"] = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
item_missed["trace_back"] = traceback.format_exc(limit=None, chain=True)
item_missed["error_type"] = str(type(error))
item_missed["error"] = str(error)
item_missed["item_url"] = response.url
yield QRLNetworkMissedItem(item_missed)
def errback_conn(self, failure):
item_missed = QRLNetworkMissedItem()
item_missed["spider_name"] = self.name
item_missed["spider_version"] = self.version
item_missed["location_script_file"] = str(__name__)
item_missed["location_script_function"] = str(__class__.__name__) + (', ') + str(sys._getframe().f_code.co_name)
if failure.check(HttpError):
item_missed["error"] = str(failure.__class__)
item_missed["error_type"] = str(failure.value.response).split(" ")
item_missed["item_url"] = failure.value.response.url  # `failVal` was undefined
item_missed["trace_back"] = failure.getTraceback()
yield QRLNetworkMissedItem(item_missed)
elif failure.check(DNSLookupError):
item_missed["error"] = str(failure.__class__)
item_missed["error_type"] = str(failure.request).split(" ")  # key was misspelled "errorType"
item_missed["item_url"] = failure.request.url
item_missed["trace_back"] = failure.getTraceback()
yield QRLNetworkMissedItem(item_missed)
elif failure.check(TimeoutError, TCPTimedOutError):
item_missed["error"] = str(failure.__class__)
item_missed["error_type"] = str(failure.request).split(" ")
item_missed["item_url"] = failure.request.url
item_missed["trace_back"] = failure.getTraceback()
yield QRLNetworkMissedItem(item_missed)
#def spiderError(missedIn,itemError, itemErrorType, fileName,itemUrl, missedItemType):
# cur = connection.cursor()
# error_timestamp = datetime.now()
# try:
# cur.execute('INSERT INTO public. "qrl_blockchain_missed_items" (\
# "spider_name","spider_version", "missed_in",\
# "item_error", "item_error_type", "file_name", "item_url",\
# "missed_item_type", "error_timestamp"\
# ) VALUES(%s,%s,%s,%s,%s,%s,%s,%s, %s)', (
# QRLNetworkSpider.name,QRLNetworkself.version,missedIn,itemError,itemErrorType,fileName, itemUrl, missedItemType,error_timestamp))
# connection.commit()
# logging.warning('Got ERROR - check db')
# except Exception as error:
# connection.rollback()
# self.crawler.engine.close_spider(self, 'log message')
```
{
"source": "12remember/QRLtoDatabase",
"score": 2
}
#### File: QRLtoDatabase/utils/getData.py
```python
import plyvel
import argparse
import base64
import binascii
from datetime import datetime
import json
import sys
from qrl.core.PaginatedData import PaginatedData
from qrl.core.PaginatedBitfield import PaginatedBitfield
from qrl.core.misc.db import DB
from qrl.generated import qrl_pb2
from google.protobuf.json_format import MessageToJson, Parse, MessageToDict
import multiprocessing
class getData:
# NOTE: the methods below take no `self`; the class is used purely as a
# namespace (e.g. getData.getBlockData(...)), so @staticmethod would be the
# idiomatic marker here.
def getBlockHeight(source):
dbb = plyvel.DB(source)
blockheight = int.from_bytes(dbb.get(b'blockheight'), byteorder='big', signed=False)
return blockheight
def getBlockData(i, source ):
dbb = plyvel.DB(source)
pbdata = qrl_pb2.Block()
block_number_mapping = qrl_pb2.BlockNumberMapping()
hashHeader = Parse(dbb.get(str(i).encode()), block_number_mapping).headerhash
pbdata.ParseFromString(bytes(dbb.get(hashHeader)))
dictData = MessageToDict(pbdata)
# BlockDataPoint and BlockExtended not working yet
#BlockDataPointData = qrl_pb2.BlockDataPoint()
#BlockDataPointData.ParseFromString(bytes(db.get(hashHeader)))
#print(BlockDataPointData)
#BlockDataPointDic = MessageToDict(BlockDataPointData)
#print(BlockDataPointDic)
#print('BlockDataPoint')
#LatticePKData = qrl_pb2.LatticePK()
#LatticePKData.ParseFromString(db.get(addrByte))
#LatticePKDic = MessageToDict(LatticePKData)
#test = Parse(db.get(str(i).encode()), block_number_mapping)
#BlockExtendedData = qrl_pb2.BlockExtended()
#BlockExtendedData.ParseFromString(bytes(db.get(test)))
#print(BlockExtendedData)
#BlockExtendedDic = MessageToDict(BlockExtendedData)
#print(BlockExtendedDic)
#print('BlockExtended')
blockData = {}
blockData["block_number"] = i
blockData["hash_header"] = hashHeader.hex()
blockData["timestamp"] = datetime.fromtimestamp(int(dictData["header"]["timestampSeconds"]))
blockData["reward_block"] = dictData["header"]["rewardBlock"]
blockData["merkle_root"] = dictData["header"]["merkleRoot"]
if "hashHeaderPrev" in dictData["header"]:
blockData["hash_header_prev"] = base64.b64decode(dictData["header"]["hashHeaderPrev"]).hex()
if "rewardFee" in dictData["header"]:
blockData["reward_fee"] = dictData["header"]["rewardFee"]
if "miningNonce" in dictData["header"]:
blockData["mining_nonce"] = int(dictData["header"]["miningNonce"])
if "extraNonce" in dictData["header"]:
blockData["extra_nonce"] = int(dictData["header"]["extraNonce"])
if "genesisBalance" in dictData:
blockData["genesis_balance"] = dictData["genesisBalance"][0]["balance"]
if "transactions" in dictData:
blockData["transactions"] = dictData["transactions"]
return blockData
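# Usage sketch (the LevelDB path below is an assumption; pass your node's
# actual state directory):
#   height = getData.getBlockHeight("/path/to/qrl/state")
#   for i in range(height):
#       block = getData.getBlockData(i, "/path/to/qrl/state")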
def getTransactionData(t, block_number, timestamp):
tData = {}
tData["block_number"], tData["timestamp"] = block_number, timestamp
tData["transaction_hash"] = base64.b64decode(t["transactionHash"]).hex()
if "masterAddr" in t:
tData["master_addr"] = "Q" + base64.b64decode(t["masterAddr"]).hex()
if "publicKey" in t:
tData["public_key"] = base64.b64decode(t["publicKey"]).hex()
if "signature" in t:
tData["signature"] = base64.b64decode(t["signature"]).hex()
if "nonce" in t:
tData["nonce"] = t["nonce"]
if "fee" in t:
tData["fee"] = t["fee"]
return tData
def getTransactionDataCoinbase(t, block_number, timestamp):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["addr_to"] = "".join(["Q" , base64.b64decode(t["coinbase"]["addrTo"]).hex()])
tData["amount"] = t["coinbase"]["amount"]
return tData
def getTransactionDataTransfer(t, block_number, timestamp, transfer):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["addr_to"] = "".join(["Q" , base64.b64decode(transfer["addr_to"]).hex()])
tData["amount"] = transfer["amount"]
return tData
def getTransactionDataToken(t, block_number, timestamp):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["symbol"] = base64.b64decode(t["token"]["symbol"]).decode("utf-8")
tData["name"] = base64.b64decode(t["token"]["name"]).decode("utf-8")
tData["owner"] = "".join(["Q" , base64.b64decode(t["token"]["owner"]).hex()])
tData["initial_balances"] = t["token"]["initialBalances"]
tData["initial_balances"] = list(map(lambda x: json.dumps(x), tData["initial_balances"]))
if "decimals" in t["token"]:
tData["decimals"] = t["token"]["decimals"]
return tData
def getTransactionDataMessage(t, block_number, timestamp):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["message_hash"] = t["message"]["messageHash"]
try:
messageHash = base64.b64decode(t["message"]["messageHash"]).decode("utf-8")
tData["message_text"] = messageHash
except:
messageHash = base64.b64decode(t["message"]["messageHash"]).hex()
tData["message_text"] = messageHash
#https://github.com/theQRL/qips/blob/master/qips/QIP002.md
if messageHash.startswith("afaf"):
if messageHash.startswith("afafa1"):
try:
docText = binascii.a2b_hex(messageHash[46:]).decode("utf-8")
except:
docText = binascii.a2b_hex(messageHash[46:]).hex()
tData["message_text"] = " ".join(["[Doc notarization] SHA1:" , messageHash[6:46] , "TEXT:" , docText])
elif messageHash.startswith("afafa2"):
try:
docText = binascii.a2b_hex(messageHash[70:]).decode("utf-8")
except:
docText = binascii.a2b_hex(messageHash[70:]).hex()
tData["message_text"] = " ".join(["[Doc notarization] SHA256:" , messageHash[6:70] , "TEXT:" , docText])
elif messageHash.startswith("afafa3"):
try:
docText = binascii.a2b_hex(messageHash[38:]).decode("utf-8")
except:
docText = binascii.a2b_hex(messageHash[38:]).hex()
tData["message_text"] = " ".join(["[Doc notarization] MD5:" , messageHash[6:38] , "TEXT:" , docText ])
#https://github.com/theQRL/message-transaction-encoding
elif messageHash.startswith("0f0f"):
msgHeader = "[Unknown]"
msgBegin = 8
text = ""
if messageHash.startswith("0f0f0000") or messageHash.startswith("0f0f0001"):
msgHeader = "[Reserved] "
elif messageHash.startswith("0f0f0002"):
if messageHash.startswith("0f0f0002af"):
msgHeader = "[Keybase-remove] "
elif messageHash.startswith("0f0f0002aa"):
msgHeader = "[Keybase-add] "
else:
msgHeader = "".join(["[Keybase-" , messageHash[8:10] , "]" ])
msgBegin = 12
try:
user = binascii.a2b_hex(messageHash[msgBegin:].split("20")[0]).decode("utf-8")
keybaseHex = binascii.a2b_hex(messageHash[msgBegin + len(user)*2 + 2:]).hex()
text = "".join(["USER:" , user , " KEYBASE_HEX:" , keybaseHex ])
except:
text = ""
elif messageHash.startswith("0f0f0003"):
if messageHash.startswith("0f0f0002af"):
msgHeader = "[Github-remove] "
elif messageHash.startswith("0f0f0002aa"):
msgHeader = "[Github-add] "
else:
msgHeader = "".join(["[Github-" , messageHash[8:10] , "] " ])
msgBegin = 18
text = binascii.a2b_hex(messageHash[msgBegin:]).hex()
elif messageHash.startswith("0f0f0004"):
msgHeader = "[Vote] "
if len(text) == 0:
try:
text = binascii.a2b_hex(messageHash[msgBegin:]).decode("utf-8")
except:
try:
text = binascii.a2b_hex(messageHash[msgBegin:]).hex()
except:
text = str(messageHash[msgBegin:])
tData["message_text"] = " ".join([msgHeader , text ])
return tData
def getTransactionDataLatticePk(t, block_number, timestamp):
tData = getData.getTransactionData(t, block_number, timestamp)
print('&&&&&&&&&&&&&')
print('latticePk - T')
for key, value in t.items() :
print(key)
print('--------------------')
print('--------------------')
for key, value in t["latticePk"].items() :
print(key)
print('^^^^^^^^^^^^^^^^')
tData["kyber_pk"] = t["latticePk"]["kyberPK"]
tData["dilithium_pk"] = t["latticePk"]["dilithiumPK"]
return tData
def getTransactionDataSlave(t, block_number, timestamp, transfer):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["slave_pk"] = "".join(["Q" , base64.b64decode(transfer["slave_pk"]).hex()])
tData["access_type"] = transfer["access_type"]
return tData
def getTransactionDataTransferToken(t, block_number, timestamp, transfer):
tData = getData.getTransactionData(t, block_number, timestamp)
tData["token_txhash"] = transfer["token_txhash"]
tData["addr_to"] = "".join(["Q" , base64.b64decode(transfer["addr_to"]).hex()])
tData["amount"] = transfer["amount"]
return tData
def getTransactionDataOthers(t, block_number, timestamp):
tData = getData.getTransactionData(t, block_number, timestamp)
print('------------------------')
print('not transactionProcessed')
print('------------------------')
print(t)
print('------------------------')
if "multiSigCreate" in t:
tData['type'] = "multiSigCreate"
if "multiSigSpend" in t:
tData['type'] = "multiSigSpend"
if "multiSigVote" in t:
tData['type'] = "multiSigVote"
if len(tData['type']) == 0:
tData['type'] = "unkown"
for key, value in tData.items() :
print(key)
print('--------------------')
print('--------------------')
print('transaction unknown')
sys.exit("transaction unknown")
tData['data'] = str(t)
return tData
def getAddressData(source, b64Addr, timeStamp):
try:
#addrData = qrl_pb2.AddressState()
addrData = qrl_pb2.OptimizedAddressState()
addrByte = base64.b64decode(b64Addr)
address = "Q" + addrByte.hex()
tree_dict = {
0: 256,
8: 256,
10: 256,
12 : 256,
14: 256,
16: 256,
18 : 256,
}
tree_height = int(address[4]) * 2
dbb = plyvel.DB(source)
addrData.ParseFromString(dbb.get(addrByte))
dictData = MessageToDict(addrData)
databasee = DB()
n = 0
false_loop = 0
OTSBitfieldByPageDic = []
while n < tree_dict[tree_height]:
page = (n // 8192) + 1
PaginatedBitfieldKey = PaginatedBitfield.generate_bitfield_key(PaginatedBitfield(False, databasee), addrByte, page)
obj = PaginatedBitfield(False, databasee)
obj.load_bitfield(addrByte, n)
ots_bitfield = obj.key_value[PaginatedBitfieldKey]
OTSBitfieldByPageDic.append(PaginatedBitfield.ots_key_reuse(ots_bitfield, n))
if PaginatedBitfield.ots_key_reuse(ots_bitfield, n) == False:
false_loop = false_loop + 1
if false_loop > 5:
break
# print(PaginatedBitfield.ots_key_reuse(ots_bitfield, n))
n = n + 1
OTSBitfieldByPageData = qrl_pb2.OTSBitfieldByPage()
OTSBitfieldByPageData.ParseFromString(dbb.get(addrByte))
# OTSBitfieldByPageDic = MessageToDict(OTSBitfieldByPageData)
#print(OTSBitfieldByPageDic)
#print('OTSBitfieldByPage')
DataList = qrl_pb2.DataList()
DataListData = qrl_pb2.DataList()
DataListData.ParseFromString(dbb.get(addrByte))
DataListDic = MessageToDict(DataListData)
#print(DataListDic)
#print('DataList')
BitfieldData = qrl_pb2.Bitfield()
BitfieldData.ParseFromString(dbb.get(addrByte))
BitfieldDic = MessageToDict(BitfieldData)
#print(BitfieldDic)
#print('Bitfield')
TransactionHashListData = qrl_pb2.TransactionHashList()
TransactionHashListData.ParseFromString(dbb.get(addrByte))
TransactionHashListDic = MessageToDict(TransactionHashListData)
#print(TransactionHashListDic)
#print('TransactionHashList')
LatticePKData = qrl_pb2.LatticePK()
LatticePKData.ParseFromString(dbb.get(addrByte))
LatticePKDic = MessageToDict(LatticePKData)
#print(LatticePKDic)
#print('LatticePK')
MultiSigAddressStateData = qrl_pb2.MultiSigAddressState()
MultiSigAddressStateData.ParseFromString(dbb.get(addrByte))
MultiSigAddressStateDic = MessageToDict(MultiSigAddressStateData)
#print(MultiSigAddressStateDic)
#print('MultiSigAddressStateDic')
MultiSigAddressesListData = qrl_pb2.MultiSigAddressesList()
MultiSigAddressesListData.ParseFromString(dbb.get(addrByte))
MultiSigAddressesListDic = MessageToDict(MultiSigAddressesListData)
#print(MultiSigAddressesListDic)
#print('MultiSigAddressesListDic')
addressData = {}
if "balance" in dictData:
addressData["balance"] = dictData["balance"]
else:
addressData["balance"] = "0"
if "nonce" in dictData:
addressData["nonce"] = dictData["nonce"]
if "usedOtsKeyCount" in dictData:
addressData["use_otskey_count"] = dictData["usedOtsKeyCount"]
if "transactionHashCount" in dictData:
addressData["transaction_hash_count"] = dictData["transactionHashCount"]
if "tokensCount" in dictData:
addressData["tokens_count"] = dictData["tokensCount"]
if "slavesCount" in dictData:
addressData["slaves_count"] = dictData["slavesCount"]
if OTSBitfieldByPageDic:
addressData["ots_bitfield"] = OTSBitfieldByPageDic
if "pageNumber" in OTSBitfieldByPageDic:
addressData["ots_bitfield_page_number"] = OTSBitfieldByPageDic["pageNumber"]
if "values" in DataListDic:
addressData["data_list"] = DataListDic["values"]
if "bitfields" in BitfieldDic:
addressData["bitfields"] = BitfieldDic["bitfields"]
if "hashes" in TransactionHashListDic:
addressData["transactionhash_list"] = TransactionHashListDic["hashes"]
if "kyberPk" in LatticePKDic:
addressData["kyber_pk"] = LatticePKDic["kyberPk"]
if "address" in MultiSigAddressStateDic:
addressData["multi_sig_addresses_hashes_address"] = MultiSigAddressStateDic["address"]
if "nonce" in MultiSigAddressStateDic:
addressData["multi_sig_addresses_hashes_nonce"] = MultiSigAddressStateDic["nonce"]
if "weights" in MultiSigAddressStateDic:
addressData["multi_sig_addresses_hashes_weights"] = MultiSigAddressStateDic["weights"]
if "hashes" in MultiSigAddressesListDic:
addressData["multi_sig_addresses_list_hashes"] = MultiSigAddressesListDic["hashes"]
addressData["last_seen"] = timeStamp
addressData["first_seen"] = timeStamp
addressData["address"] = address
return addressData
except Exception as e:
print(e)
raise
if __name__ == "__main__":
# Creates two processes
p1 = multiprocessing.Process(target=getData)
p2 = multiprocessing.Process(target=getData)
p3 = multiprocessing.Process(target=getData)
p4 = multiprocessing.Process(target=getData)
p5 = multiprocessing.Process(target=getData)
p6 = multiprocessing.Process(target=getData)
p7 = multiprocessing.Process(target=getData)
p8 = multiprocessing.Process(target=getData)
# Starts both processes
p1.start()
p2.start()
p3.start()
p4.start()
p5.start()
p6.start()
p7.start()
p8.start()
```
{
"source": "12sarah96/ranking",
"score": 3
}
#### File: rankit/Ranker/UnsupervisedRanker.py
```python
from __future__ import division
from __future__ import absolute_import
import numpy as np
import pandas as pd
import scipy as sp
from rankit.Table import Table
from scipy.sparse import coo_matrix
from scipy.sparse.linalg import lsqr
from .matrix_build import fast_colley_build
from numpy.linalg import norm
class UnsupervisedRanker(object):
"""Base class for all unsupervised ranking algorithms."""
def rank(self, table, **kwargs):
raise NotImplementedError("UnsupervisedRanker is an abstract class.")
def _showcase(self, table, ascending=False):
# one need to translate item index to item name.
indexlut = table.indexlut
rating = self.rating # iitm, rating
itemname = []
for row in rating.itertuples(index=False, name=None):
itemname.append(indexlut[row[0]])
rst = pd.DataFrame({
"name": itemname,
"rating": rating["rating"]})
rst['rank'] = rst.rating.rank(method='min', ascending=ascending).astype(np.int32)
return rst.sort_values(by=['rating', 'name'], ascending=ascending).reset_index(drop=True)
class MasseyRanker(UnsupervisedRanker):
"""Massey ranking system proposed by <NAME>: Statistical models applied to the rating of sports teams.
Bachelor's thesis, Bluefield College, 1997.
Core idea: the score difference of a game estimates the rating difference of the two teams, so ratings can be obtained by solving a linear system in the least-squares sense.
Parameters
----------
drawMargin: [0, +Inf), default 0.
When the absolute score difference between two teams is smaller than drawMargin, the game is considered a tie.
"""
def __init__(self, drawMargin = 0.0):
self.drawMargin = drawMargin
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
drawMargin = self.drawMargin
data = table.table[['hidx', 'vidx', 'hscore', 'vscore', 'weight']]
m = data.shape[0]
n = table.itemnum
y = np.zeros(m)
dat = np.zeros(m*2, dtype=np.float64)  # np.float/np.int aliases were removed in NumPy 1.24
col = np.zeros(m*2, dtype=np.int64)
row = np.zeros(m*2, dtype=np.int64)
for i, itm in enumerate(data.itertuples(index=False, name=None)):
row[i*2]=i; col[i*2]=itm[0]; dat[i*2]=itm[4];
row[i*2+1]=i; col[i*2+1]=itm[1]; dat[i*2+1]=-itm[4];
if np.abs(itm[2]-itm[3])<=drawMargin:
y[i]=0.0
else:
y[i] = itm[4]*(itm[2]-itm[3])
X = coo_matrix((dat, (row, col)), shape=(m, n))
X = X.tocsr()
rst = lsqr(X, y)
rating = rst[0]
if hasattr(self, "rating"):
self.rating["rating"] = rating
else:
self.rating = pd.DataFrame({
"iidx": np.arange(n, dtype=np.int),
"rating": rating})
return self._showcase(table, False)
class ColleyRanker(UnsupervisedRanker):
"""Colley ranking system proposed by <NAME>:
Colley's bias free college football ranking method: The colley matrix explained, 2002.
http://www.colleyrankings.com
Core idea: every team's rating starts at 0.5 and, as games are played, drifts away from 0.5
according to its probability of winning; the average rating over all teams stays 0.5.
Parameters
----------
drawMargin: [0, +Inf), default 0.
When the absolute score difference between two teams is smaller than drawMargin, the game is considered a tie.
"""
def __init__(self, drawMargin = 0.0):
self.drawMargin = drawMargin
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
drawMargin = self.drawMargin
data = table.table[['hidx', 'vidx', 'hscore', 'vscore', 'weight']]
idx = data.iloc[:, :2]
score = data.iloc[:, 2:]
C, b = fast_colley_build(np.require(idx, dtype=np.int32), np.require(score, dtype=np.float64),
table.itemnum, drawMargin)
rating = sp.linalg.solve(C, b)
if hasattr(self, "rating"):
self.rating["rating"] = rating
else:
self.rating = pd.DataFrame({
"iidx": np.arange(table.itemnum, dtype=np.int),
"rating": rating})
return self._showcase(table, False)
class KeenerRanker(UnsupervisedRanker):
"""Keener ranking system proposed by <NAME>:
The Perron-Frobenius theorem and the ranking of football teams, SIAM Review, 35(1):80-93, 1993
The core ideas are: 1. rating is proportional to real strength; 2. real strength is measured relative to competitors' strengths.
Parameters
----------
func: default None.
If set, the (Laplace-smoothed) relative score is passed through this function before it enters the rating calculation.
epsilon: [0, +Inf) default 1e-4
A small perturbation added to the results, which effectively makes every pair of teams connected (as if each team had played every other at least once).
threshold: (0, +Inf), default 1e-4
The threshold that controls when the algorithm will converge.
"""
def __init__(self, func=None, epsilon=1e-4, threshold=1e-4):
self.func = func
self.epsilon = epsilon
self.threshold = threshold
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
func, epsilon, threshold = self.func, self.epsilon, self.threshold
mtx = pd.DataFrame(data={
'hidx': pd.concat([table.table.hidx, table.table.vidx]),
'vidx': pd.concat([table.table.vidx, table.table.hidx]),
'hscore': pd.concat([table.table.hscore, table.table.vscore]),
'vscore': pd.concat([table.table.vscore, table.table.hscore]),
'weight': pd.concat([table.table.weight, table.table.weight])
}, columns = ['hidx', 'vidx', 'hscore', 'vscore', 'weight']).reset_index(drop=True)
mtx['score'] = mtx.hscore+mtx.vscore
mtx['hscore'] = (mtx['hscore']+1)/(mtx['score']+2)
mtx['vscore'] = (mtx['vscore']+1)/(mtx['score']+2)
if func is not None:
mtx['hscore'] = mtx.hscore.apply(func)
mtx['vscore'] = mtx.vscore.apply(func)
mtx['hscore'] = mtx['hscore']*mtx['weight']
mtx['vscore'] = mtx['vscore']*mtx['weight']
mtx = mtx.groupby(['hidx', 'vidx'])[['hscore', 'vscore']].mean()
mtx.reset_index(inplace=True)
D = coo_matrix((mtx.hscore.values, (mtx.hidx.values, mtx.vidx.values)), shape=(table.itemnum, table.itemnum)).tocsr()
r = np.ones(table.itemnum)/table.itemnum
pr = np.ones(table.itemnum)
while norm(pr-r)>threshold:
pr = r
rho = np.sum(r)*epsilon
r = D.dot(r)+rho*np.ones(table.itemnum)
r /= np.sum(r)
if hasattr(self, "rating"):
self.rating["rating"] = r
else:
self.rating = pd.DataFrame({
"iidx": np.arange(table.itemnum, dtype=np.int),
"rating": r})
return self._showcase(table, False)
class MarkovRanker(UnsupervisedRanker):
"""Markov ranking is actually PageRank.
The core idea is voting: in each game, each team votes for its opponent with the number of points it conceded.
If a certain pair of players met in multiple games, their scores are grouped and averaged.
Parameters
----------
restart: [0, 1], default 0.3.
Random walk with restart: in order to avoid black hole in random walk graph.
threshold: (0, +Inf), default 1e-4
The threshold that controls when the algorithm will converge.
"""
def __init__(self, restart=0.3, threshold=1e-4):
self.restart = restart
self.threshold = threshold
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
restart, threshold = self.restart, self.threshold
if restart>1 or restart<0:
raise ValueError("restart rate should be between 0 and 1.")
mtx = pd.DataFrame(data={
'hidx': pd.concat([table.table.hidx, table.table.vidx]),
'vidx': pd.concat([table.table.vidx, table.table.hidx]),
'hscore': pd.concat([table.table.hscore, table.table.vscore]),
'vscore': pd.concat([table.table.vscore, table.table.hscore]),
'weight': pd.concat([table.table.weight, table.table.weight])
}, columns = ['hidx', 'vidx', 'hscore', 'vscore', 'weight']).reset_index(drop=True)
mtx['hscore'] = mtx['hscore']*mtx['weight']
mtx['vscore'] = mtx['vscore']*mtx['weight']
mtx_ = mtx.groupby('hidx').vscore.sum().rename('htotalvote')
mtx = mtx.groupby(['hidx', 'vidx'])[['hscore', 'vscore']].mean()
mtx = pd.concat([mtx.reset_index().set_index('hidx'), mtx_], axis=1).reset_index()
mtx['prob'] = mtx['vscore']/mtx['htotalvote']
D = coo_matrix((mtx.prob.values, (mtx.hidx.values, mtx.vidx.values)), shape=(table.itemnum, table.itemnum)).transpose().tocsr()
r = np.ones(table.itemnum)/table.itemnum
pr = np.ones(table.itemnum)
while norm(pr-r)>threshold:
pr = r
vrestart = restart*np.ones(table.itemnum)/table.itemnum
r = (1-restart)*D.dot(r)+vrestart
r /= np.sum(r)
if hasattr(self, "rating"):
self.rating["rating"] = r
else:
self.rating = pd.DataFrame({
"iidx": np.arange(table.itemnum, dtype=np.int),
"rating": r})
return self._showcase(table, False)
class ODRanker(UnsupervisedRanker):
"""The Offence-defence rank tries to assign an offence rating and a defence rating to each team.
By saying "offence rating", we assume that a team has a high offence rating when it gained a lot of points
from a team good in defence. Vise versa. The offence rating of a team is associated with defence rating of each
competitor in a non-linear way. The defence rating of a team is also non-linearly related to each competitors'
offence rating.
Parameters
----------
method: {'summary', 'offence', 'defence'}, default 'summary'.
The rating to be returned. 'summary' is offence/defence.
epsilon: [0, +Inf) default 1e-4
A small value that forces convergence.
threshold: (0, +Inf), default 1e-4
The threshold that controls when the algorithm will converge.
"""
def __init__(self, method='summary', epsilon=1e-4, threshold=1e-4):
self.method = method
self.epsilon = epsilon
self.threshold = threshold
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
method, epsilon, threshold = self.method, self.epsilon, self.threshold
mtx = pd.DataFrame(data={
'hidx': pd.concat([table.table.hidx, table.table.vidx]),
'vidx': pd.concat([table.table.vidx, table.table.hidx]),
'hscore': pd.concat([table.table.hscore, table.table.vscore]),
'vscore': pd.concat([table.table.vscore, table.table.hscore]),
'weight': pd.concat([table.table.weight, table.table.weight])
}, columns = ['hidx', 'vidx', 'hscore', 'vscore', 'weight']).reset_index(drop=True)
mtx['hscore'] = mtx['hscore']*mtx['weight']
mtx['vscore'] = mtx['vscore']*mtx['weight']
mtx = mtx.groupby(['hidx', 'vidx'])[['hscore', 'vscore']].mean()
mtx.reset_index(inplace=True)
D = coo_matrix((mtx.vscore.values, (mtx.hidx.values, mtx.vidx.values)), shape=(table.itemnum, table.itemnum)).tocsr()
Dt = D.transpose()
prevd = np.ones(table.itemnum)/table.itemnum
d = np.ones(table.itemnum)
while norm(prevd-d)>threshold:
prevd = d
o = Dt.dot(1/d)+np.ones(d.shape[0])*epsilon*(np.sum(1/d))
d = D.dot(1/o)+np.ones(o.shape[0])*epsilon*(np.sum(1/o))
o = Dt.dot(1/d)
if method=='summary':
r = o/d
elif method=='offence':
r = o
elif method=='defence':
r = d
else:
raise ValueError('output should be one of summary, offence or defence.')
if hasattr(self, "rating"):
self.rating["rating"] = r
else:
self.rating = pd.DataFrame({
"iidx": np.arange(table.itemnum, dtype=np.int),
"rating": r})
return self._showcase(table, True if method=='defence' else False)
class DifferenceRanker(UnsupervisedRanker):
"""This ranker targets at predicting score difference of games directly.
The difference of ratings are proportional to the difference of score.
"""
def rank(self, table):
"""Calculate the rank and rating with specified parameters.
Parameters
----------
table: Table
The record table to be ranked, should be a Table object.
Returns
-------
pandas.DataFrame, with column ['name', 'rating', 'rank']
"""
mtx = pd.DataFrame(data={
'hidx': pd.concat([table.table.hidx, table.table.vidx]),
'vidx': pd.concat([table.table.vidx, table.table.hidx]),
'hscore': pd.concat([table.table.hscore, table.table.vscore]),
'vscore': pd.concat([table.table.vscore, table.table.hscore]),
'weight': pd.concat([table.table.weight, table.table.weight])
}, columns = ['hidx', 'vidx', 'hscore', 'vscore', 'weight']).reset_index(drop=True)
mtx['score'] = mtx['hscore']-mtx['vscore']
mtx['score'] = mtx['score']*mtx['weight']
mtx = mtx.groupby(['hidx', 'vidx']).score.mean().reset_index()
r = mtx.groupby('hidx').score.sum()/table.itemnum
r = r.sort_index()
if hasattr(self, "rating"):
self.rating["rating"] = r.values
else:
self.rating = pd.DataFrame({
"iidx": np.arange(table.itemnum, dtype=np.int),
"rating": r})
return self._showcase(table, False)
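# A minimal usage sketch (hypothetical data; Table's exact constructor
# signature is assumed from rankit's documentation, not from this file):
#   from rankit.Table import Table
#   df = pd.DataFrame({'host': ['A', 'B', 'C'], 'visit': ['B', 'C', 'A'],
#                      'hscore': [3, 1, 2], 'vscore': [1, 2, 2]})
#   ranker = MasseyRanker()
#   print(ranker.rank(Table(df, col=['host', 'visit', 'hscore', 'vscore'])))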
```
#### File: 12sarah96/ranking/setup.py
```python
from setuptools import setup, find_packages
from setuptools.extension import Extension
import os
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
with open('requirements.txt') as f:
required = f.read().splitlines()
def my_build_ext(pars):
# import delayed:
from setuptools.command.build_ext import build_ext as _build_ext
# include_dirs adjusted:
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
# Prevent numpy from thinking it is still in its setup process:
__builtins__.__NUMPY_SETUP__ = False
import numpy
self.include_dirs.append(numpy.get_include())
#object returned:
return build_ext(pars)
setup(name="rankit",
version="0.3.0",
packages=find_packages(exclude=['example']),
setup_requires=['numpy', 'Cython'],
install_requires=required,
include_package_data=True,
description="A simple ranking solution for matches.",
author="<NAME>",
author_email="<EMAIL>",
url="http://github.com/wattlebird/ranking",
license="MIT",
cmdclass={'build_ext' : my_build_ext},
ext_modules=[
Extension("rankit.Ranker.matrix_build",
["rankit/Ranker/matrix_build.pyx"]
)
],
classifiers=['Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Operating System :: Unix',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
test_suite = 'nose.collector'
)
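# Typical build from a source checkout (assumed commands, not from the repo's
# docs): `pip install numpy Cython` followed by `pip install .`. The delayed
# import in my_build_ext exists so numpy's include dirs are only resolved
# after setup_requires has made numpy importable.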
```
{
"source": "12souza/inhouse-bot",
"score": 3
}
#### File: 12souza/inhouse-bot/inhouse-bot.py
```python
import asyncio
import discord
import json
import os
import random
from dotenv import load_dotenv
from discord.ext import commands
from discord.utils import get
client = commands.Bot(command_prefix = "!", case_insensitive=True)
client.remove_command('help')
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
msgList = []
playerList = {}
msg = " "
pickupActive = 0
mapChoice1 = None
mapChoice2 = None
mapChoice3 = None
mapChoice4 = None
mapSelected = []
mapVotes = {}
blueTeam = []
redTeam = []
alreadyVoted = []
vMsg = None
mapVote = 0
tMsg = None
ordered = []
mapsPicked = 0
captains = []
pickNum = 1
def PopulateTable():
global msgList
global playerList
global msg
# msgList = []
# for i in range(len(playerList)):
# msgList.append(playerList[i])
# msg = ''.join(msgList)
# return msg
msg = ", ".join([s for s in playerList.values()])
return msg
def DePopulatePickup():
global pickupActive
global mapsPicked
global mapVote
global msgList
global eList
global msg
global playerList
global blueTeam
global redTeam
global mapSelected
global ordered
global mapVotes
global captains
global pickNum
ordered = []
pickNum = 1
captains = []
mapVote = 0
mapsPicked = 0
pickupActive = 0
msgList = []
eList = []
blueTeam = []
redTeam = []
playerList = {}
msg = None
mapSelected = []
mapVotes = {}
def PickMaps():
global mapChoice1
global mapChoice2
global mapChoice3
global mapSelected
global mapVotes
global mapList
mapname = random.choice(mapList)
mapChoice1 = mapname
mapList.remove(mapname)
mapVotes[mapChoice1] = []
mapname = random.choice(mapList)
mapChoice2 = mapname
mapList.remove(mapname)
mapVotes[mapChoice2] = []
mapname = random.choice(mapList)
mapChoice3 = mapname
mapList.remove(mapname)
mapVotes[mapChoice3] = []
@client.command(pass_context=True)
async def pickup(ctx):
global pickupActive
global mapChoice1
global mapChoice2
global mapChoice3
global mapChoice4
global mapList
if pickupActive == 0 and mapVote == 0 and mapsPicked == 0 and pickNum == 1:
with open('maplist.json') as f:
mapList = json.load(f)
        DePopulatePickup()  # clear any state left over from a previous pickup
await ctx.send("Pickup started, you can add in 10 seconds")
await asyncio.sleep(5)
await ctx.send("Pickup started, you can add in 5 seconds")
await asyncio.sleep(5)
await ctx.send("Type !add")
pickupActive = 1
PopulateTable()
await ctx.send("```\n Players\n" + msg + "```")
@client.command(pass_context=True)
async def cancel(ctx):
await ctx.send("pickup cancelled..")
DePopulatePickup()
@client.command(pass_context=True)
async def add(ctx):
global playerList
global pickupActive
global vMsg
global mapChoice1
global mapChoice2
global mapChoice3
global mapChoice4
global mapVotes
global mapVote
if(pickupActive == 1):
playerId = ctx.author.id
playerName = ctx.author.display_name
if playerId not in playerList:
playerList[playerId] = playerName
PopulateTable()
await ctx.send("```\n Players\n" + msg + "```")
if(len(playerList) >= 8):
# ensure that playerlist is first 8 people added
playerList = dict(list(playerList.items())[:8])
pickupActive = 0
PickMaps()
mapChoice4 = "New Maps"
mapVotes[mapChoice4] = []
vMsg = await ctx.send("```Vote for your map! When vote is stable, !lockmap\n\n"
+ "1️⃣ " + mapChoice1 + " " * (30 - len(mapChoice1)) + str(len(mapVotes[mapChoice1])) + " Votes\n"
+ "2️⃣ " + mapChoice2 + " " * (30 - len(mapChoice2)) + str(len(mapVotes[mapChoice2])) + " Votes\n"
+ "3️⃣ " + mapChoice3 + " " * (30 - len(mapChoice3)) + str(len(mapVotes[mapChoice3])) + " Votes\n"
+ "4️⃣ " + mapChoice4 + " " * (30 - len(mapChoice4)) + str(len(mapVotes[mapChoice4])) + " Votes```")
await vMsg.add_reaction("1️⃣")
await vMsg.add_reaction("2️⃣")
await vMsg.add_reaction("3️⃣")
await vMsg.add_reaction("4️⃣")
mapVote = 1
@client.command(pass_context=True)
async def lockmap(ctx):
global mapsPicked
global mapChoice1
global mapChoice2
global mapChoice3
global mapChoice4
global mapVotes
global mapVote
global vMsg
global mapList
global tMsg
sameVotes = []
ordered = []
highestVote = 0
winningMap = " "
# mapVotes[mapChoice1] = len(mapVotes[mapChoice1])
# mapVotes[mapChoice2] = len(mapVotes[mapChoice2])
# mapVotes[mapChoice3] = len(mapVotes[mapChoice3])
# mapVotes[mapChoice4] = len(mapVotes[mapChoice4])
# print(mapVotes)
if(mapVote == 1):
# for i in list(mapVotes):
# if(mapVotes[i] > highestVote):
# sameVotes.clear()
# sameVotes.append(i)
# highestVote = mapVotes[i]
# elif(mapVotes[i] == highestVote):
# highestVote = mapVotes[i]
# sameVotes.append(i)
# ordered = sorted(mapVotes, key=mapVotes.get, reverse=True)
# print(ordered)
# get top maps
        mapTally = [(pickedMap, len(votes)) for (pickedMap, votes) in mapVotes.items()]
        mapTally = sorted(mapTally, key=lambda e: e[1], reverse=True)  # sort by vote count, descending
        ordered = [pickedMap for (pickedMap, votes) in mapTally]
        highestVote = mapTally[0][1]
        sameVotes = [pickedMap for (pickedMap, votes) in mapTally if votes == highestVote]
winningMap = random.choice(sameVotes)
if(winningMap == "New Maps"):
mapVotes = {}
PickMaps()
mapChoice4 = ordered[1]
mapVotes[mapChoice4] = []
vMsg = await ctx.send("```Vote for your map! Be quick, you only have 60 seconds to vote..\n\n"
+ "1️⃣ " + mapChoice1 + " " * (30 - len(mapChoice1)) + str(len(mapVotes[mapChoice1])) + " Votes\n"
+ "2️⃣ " + mapChoice2 + " " * (30 - len(mapChoice2)) + str(len(mapVotes[mapChoice2])) + " Votes\n"
+ "3️⃣ " + mapChoice3 + " " * (30 - len(mapChoice3)) + str(len(mapVotes[mapChoice3])) + " Votes\n"
+ "4️⃣ " + mapChoice4 + " " * (30 - len(mapChoice4)) + str(len(mapVotes[mapChoice4])) + " Votes```")
await vMsg.add_reaction("1️⃣")
await vMsg.add_reaction("2️⃣")
await vMsg.add_reaction("3️⃣")
await vMsg.add_reaction("4️⃣")
else:
await ctx.send("The winning map is " + winningMap)
await ctx.send("Assign captains to begin the team picking process with !cap @cap1 @cap2")
mapVote = 0
mapsPicked = 1
@client.command(pass_context=True)
async def cap(ctx, cap1: discord.Member, cap2: discord.Member):
global tMsg
global mapsPicked
global captains
if(mapsPicked == 1):
if(cap1.display_name in playerList.values()):
blueTeam.append(cap1.display_name)
captains.append(cap1.display_name)
del playerList[cap1.id]
if(cap2.display_name in playerList.values()):
redTeam.append(cap2.display_name)
captains.append(cap2.display_name)
del playerList[cap2.id]
pMsgList = ["Player List: "]
bTeamMsgList = ["Blue Team: "]
rTeamMsgList = ["Red Team: "]
for i in playerList.values():
pMsgList.append(i + "\n")
for i in blueTeam:
bTeamMsgList.append(i + "\n")
for i in redTeam:
rTeamMsgList.append(i + "\n")
pMsg = ' '.join(pMsgList)
bMsg = ' '.join(bTeamMsgList)
rMsg = ' '.join(rTeamMsgList)
tMsg = await ctx.send("```\n" + pMsg + "\n\n" + bMsg + "\n\n" + rMsg + "```")
@client.command(pass_context=True)
async def remove(ctx):
global playerList
global pickupActive
global msg
if(pickupActive == 1):
if ctx.author.id in playerList:
del playerList[ctx.author.id]
PopulateTable()
await ctx.send("```\n Players\n" + msg + "```")
@client.command(pass_context=True)
async def pick(ctx, name: discord.Member):
global blueTeam
global redTeam
global tMsg
global playerList
global pickNum
playerName = name.display_name
playerId = name.id
captain = ctx.author.display_name
if captain in captains:
if captain in blueTeam:
if((pickNum == 1) or (pickNum == 3) or (pickNum == 6)):
del playerList[playerId]
blueTeam.append(playerName)
pickNum += 1
if captain in redTeam:
if((pickNum == 2) or (pickNum == 4) or (pickNum == 5) or (pickNum == 7)):
del playerList[playerId]
redTeam.append(playerName)
pickNum += 1
if(len(playerList) == 1):
        blueTeam.append(list(playerList.values())[0])  # last remaining player defaults to blue
playerList = {}
bTeamMsgList = ["Blue Team: "]
rTeamMsgList = ["Red Team: "]
for i in blueTeam:
bTeamMsgList.append(i + " ")
for i in redTeam:
rTeamMsgList.append(i + " ")
bMsg = ' '.join(bTeamMsgList)
rMsg = ' '.join(rTeamMsgList)
await ctx.send("Here are the teams...")
await ctx.send("```\n" + bMsg + "\n\n" + rMsg + "```")
DePopulatePickup()
if(len(playerList) > 1):
pMsgList = ["Player List: "]
bTeamMsgList = ["Blue Team: "]
rTeamMsgList = ["Red Team: "]
        for i in playerList.values():
            pMsgList.append(i + " ")
for i in blueTeam:
bTeamMsgList.append(i + " ")
for i in redTeam:
rTeamMsgList.append(i + " ")
pMsg = ' '.join(pMsgList)
bMsg = ' '.join(bTeamMsgList)
rMsg = ' '.join(rTeamMsgList)
await tMsg.edit(content= "```\n" + pMsg + "\n\n" + bMsg + "\n\n" + rMsg + "```")
@client.event
async def on_reaction_add(reaction, user):
global mapVote
global playerList
global alreadyVoted
global mapVotes
#print(reaction.author.display_name)
if((reaction.message.channel.name == "inhouse") and (mapVote == 1) and (user.display_name != "inhouse-bot")):
if((reaction.emoji == '1️⃣') or (reaction.emoji == '2️⃣') or (reaction.emoji == '3️⃣') or (reaction.emoji == '4️⃣')):
if(user.id in playerList):
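                # A player may change their vote: drop any previous vote by
                # this user before recording the new one, so each player
                # counts at most once.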
for i in list(mapVotes):
if(user.id in mapVotes[i]):
mapVotes[i].remove(user.id)
if(reaction.emoji == '1️⃣'):
mapVotes[mapChoice1].append(user.id)
if(reaction.emoji == '2️⃣'):
mapVotes[mapChoice2].append(user.id)
if(reaction.emoji == '3️⃣'):
mapVotes[mapChoice3].append(user.id)
if(reaction.emoji == '4️⃣'):
mapVotes[mapChoice4].append(user.id)
await vMsg.edit(content="```Vote for your map! Be quick, you only have 30 seconds to vote..\n\n"
+ "1️⃣ " + mapChoice1 + " " * (30 - len(mapChoice1)) + str(len(mapVotes[mapChoice1])) + " Votes\n"
+ "2️⃣ " + mapChoice2 + " " * (30 - len(mapChoice2)) + str(len(mapVotes[mapChoice2])) + " Votes\n"
+ "3️⃣ " + mapChoice3 + " " * (30 - len(mapChoice3)) + str(len(mapVotes[mapChoice3])) + " Votes\n"
+ "4️⃣ " + mapChoice4 + " " * (30 - len(mapChoice4)) + str(len(mapVotes[mapChoice4])) + " Votes```")
# else:
# await reaction.message.channel.send("Youre not in the pickup sir.")
@client.event
async def on_ready():
print(f'{client.user} is aliiiiiive!')
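# Runtime assumptions: a `.env` file providing DISCORD_TOKEN, a `maplist.json`
# file containing a JSON array of map names, and a text channel named
# "inhouse" where the reaction-based map vote takes place.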
client.run(TOKEN)
``` |
{
"source": "12star9/Python-Tools",
"score": 2
} |
#### File: Python-Tools/re-sign-ipa/re-sign-ipa.py
```python
import zipfile
import os.path
import os
import time
import shutil
import subprocess
import plistlib
import commands
class ReSignIpaLOgic(object):
def __init__(self,ipa_path):
self.ipa_path = ipa_path
self.embedCode=False
self.signextensions=['.framework/']
# def printMobileProvisionProfile(self):
# os.system('security cms -D -i %s'%(self.mobileProvisionProfilePath))
    #Import the certificate and return its name; handles the case where the deployment Mac has not imported the matching signing certificate
def importP12CerFile(self,cer_path,cer_password):
p12Name=''
p = subprocess.call('security import %s -k ~/Library/Keychains/login.keychain -P %s -T /usr/bin/codesign'%(cer_path,cer_password),shell=True)
if p == 0:
p = subprocess.check_output('openssl pkcs12 -nodes -in %s -info -nokeys -passin "pass:%s" 2>/dev/null | grep "friendlyName"'%(cer_path,cer_password),shell=True)
p= str(p)
p= p.strip()
p= p.replace('friendlyName:','',1)
p = p.rstrip('\n')
p12Name = p
return p12Name
def copyprovsion2appdir(self,originpath,mobileprovision):
for dirpath, dirnames, filenames in os.walk(originpath):
if dirpath[dirpath.rfind('.'):] == '.app':
shutil.copy(mobileprovision,'%s/%s' % (dirpath,'embedded.mobileprovision'))
return True
return False
def start_generate_entitlements(self,mobileprovisionpath,entilementspath):
entilementfull = entilementspath[:entilementspath.rfind('.')] + '_full.plist'
(status1, output1) = commands.getstatusoutput('security cms -D -i "%s" > %s' % (mobileprovisionpath, entilementfull))
(status2, output2) = commands.getstatusoutput('/usr/libexec/PlistBuddy -x -c "Print:Entitlements" %s > %s' % (entilementfull,entilementspath))
return status1 == 0 and status2 == 0
def isneedsign(self,filename):
for signextension in self.signextensions:
if signextension == filename[filename.rfind('.'):]:
return True
return False
def codesign(self,certificate,entilement,signObj,extrapath):
        # Start injecting code
if self.embedCode==True and '.app' in signObj and not '.framework' in signObj and not '/PlugIns/' in signObj and not '.dylib' in signObj:
machFileName= signObj.split('/')[-2].split('.')[-2]
machFilePath= os.path.join(extrapath,signObj,machFileName)
os.system('chmod +x %s'%(machFilePath))
machFileFrameworkPath= os.path.join(extrapath,signObj,'Frameworks')
machFoloerPath=os.path.join(extrapath,signObj)
insert_sdks_path=os.path.join(os.getcwd(),'insert_sdks')
shutil.copytree('%s/MobGiAdsToolModuleBundle.bundle'%(insert_sdks_path),os.path.join(extrapath,signObj,'MobGiAdsToolModuleBundle.bundle'))
if not os.path.exists(machFileFrameworkPath):
shutil.copytree('%s/Frameworks'%(insert_sdks_path),os.path.join(extrapath,signObj,'Frameworks'))
pass
else:
for temp_path in os.listdir('%s/Frameworks'%(insert_sdks_path)):
if '.framework' in temp_path:
shutil.copytree(os.path.join('%s/Frameworks'%(insert_sdks_path),temp_path),os.path.join(machFileFrameworkPath,temp_path))
pass
os.system('chmod +x %s'%('%s/yololib'%(os.getcwd())))
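            # yololib injects an extra LC_LOAD_DYLIB load command into the
            # Mach-O binary so the frameworks copied above are loaded at launch.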
cmd1='%s/yololib %s %s'%(os.getcwd(),machFilePath,'Frameworks/MobGiAdsToolModule.framework/MobGiAdsToolModule')
print cmd1
os.system(cmd1)
frameworkPath2= os.path.join(machFileFrameworkPath,'SDKCommonModule.framework/SDKCommonModule')
os.system('%s/yololib %s %s'%(os.getcwd(),machFilePath,'Frameworks/SDKCommonModule.framework/SDKCommonModule'))
sign_cmd='codesign -f -s "%s" --entitlements "%s" "%s"' % (certificate,entilement,os.path.join(machFileFrameworkPath,'MobGiAdsToolModule.framework/'))
os.system(sign_cmd)
sign_cmd='codesign -f -s "%s" --entitlements "%s" "%s"' % (certificate,entilement,os.path.join(machFileFrameworkPath,'SDKCommonModule.framework/'))
os.system(sign_cmd)
sign_cmd='codesign -f -s "%s" --entitlements "%s" "%s"' % (certificate,entilement,'%s%s' % (extrapath,signObj))
print sign_cmd
(status, output) = commands.getstatusoutput(sign_cmd)
if status == 0 and 'replacing existing signature' in output:
            print 'replacing %s existing signature succeeded' % signObj
return True
else:
print(output)
return False
def startsign(self,certificate,entilement,zfilelist,extrapath):
print("----------------开始签名----------------")
for filename in zfilelist:
if self.isneedsign(filename):
if not self.codesign(certificate,entilement,filename,extrapath):
return False
return True
def zipcompress(self,originpath,destinationzfile):
resignedzfile = zipfile.ZipFile(destinationzfile,'w',zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(originpath):
fpath = dirpath.replace(originpath,'')
fpath = fpath and fpath + os.sep or ''
for filename in filenames:
resignedzfile.write(os.path.join(dirpath, filename), fpath+filename)
resignedzfile.close()
def verifySignature(self,extralfilepath):
for dirpath, dirnames, filenames in os.walk(extralfilepath):
if dirpath[dirpath.rfind('.'):] == '.app':
(status,output) = commands.getstatusoutput('codesign -v "%s"' % dirpath)
if len(output) == 0:
return True
else:
print(output)
return False
return False
def startInvoke(self):
zipFilePath = self.ipa_path
extrapath = '%s/Payload_From_Ipa/' % (os.path.dirname(zipFilePath))
certificate='iPhone Developer: <NAME> (H6KAK88X9G)'
mobileprovision = '/Users/star.liao/Desktop/Git/Python-Tools/re-sign-ipa/embedded.mobileprovision'
self.mobileProvisionProfilePath=mobileprovision
# self.printMobileProvisionProfile()
entilement = extrapath + "entitlements.plist"
destinationzfile = zipFilePath[:zipFilePath.rfind('.')] + '_resigned.ipa'
originzfile = zipfile.ZipFile(zipFilePath,'r')
zfilelist = originzfile.namelist()
zfilelist.reverse()
originzfile.extractall(extrapath)
self.copyprovsion2appdir(extrapath, mobileprovision)
if not self.start_generate_entitlements(mobileprovision,entilement):
originzfile.close()
shutil.rmtree(extrapath)
return False
try:
            #Start signing
if zfilelist != None and self.startsign(certificate,entilement,zfilelist,extrapath):
if self.verifySignature(extrapath):
self.zipcompress(extrapath,destinationzfile)
print "重签名打包成功,请查看:%s" % destinationzfile
else:
pass
else:
pass
finally:
originzfile.close()
shutil.rmtree(extrapath)
reSignIpaLOgic=ReSignIpaLOgic('/Users/star.liao/Desktop/Git/Python-Tools/re-sign-ipa/1.ipa')
reSignIpaLOgic.embedCode=True
reSignIpaLOgic.startInvoke()
```
#### File: Python-Tools/sdk_thinning/SDK_Thinning.py
```python
import sys
import os
import subprocess
import time
def init_a_files():
specify_str = '.a'
sdk_path = u'/Users/star.liao/Desktop/三轮测试游戏工程/TempleRun20425/TR2_SDKList/AdS_SDK/AggregationAdThirdSDKs'
    # Search the specified directory
results = []
folders = [sdk_path]
for folder in folders:
        # Add all subdirectories under this folder to folders for traversal
folders += [os.path.join(folder, x) for x in os.listdir(folder) \
if os.path.isdir(os.path.join(folder, x))]
        # Store the paths of all files matching the condition in results
for x in os.listdir(folder):
if os.path.isfile(os.path.join(folder, x)) and specify_str in x:
sdk_path = os.path.join(folder, x);
if '.framework' in sdk_path:
continue;
results.append(sdk_path);
pass
return results;
def init_framework_files():
specify_str = '.framework'
sdk_path = u'/Users/star.liao/Desktop/三轮测试游戏工程/TempleRun20425/TR2_SDKList/AdS_SDK/AggregationAdThirdSDKs'
    # Search the specified directory
results = []
folders = [sdk_path]
for folder in folders:
        # Add all subdirectories under this folder to folders for traversal
folders += [os.path.join(folder, x) for x in os.listdir(folder) \
if os.path.isdir(os.path.join(folder, x))]
        # Store the paths of all files matching the condition in results
if specify_str in folder:
framework_result= os.path.split(folder);
framework_name= framework_result[1];
framework_name_len=len(framework_name);
total_len=framework_name_len-len(specify_str);
result=framework_name[0:total_len];
framework_name_temp=result;
if len(result) != 0:
for x in os.listdir(folder):
if x in framework_name_temp:
sdk_path = os.path.join(folder, x);
results.append(sdk_path);
pass;
return results;
def get_sdk_infos(sdk_path):
cmd = "lipo -info %s" % sdk_path
cmpsplit=cmd.split()
output = subprocess.check_output(cmpsplit)
result=set(output.split())
architectures=[];
thin_sdks=[];
for d in result:
if('armv7' in d or 'armv7s' in d or 'arm64' in d or 'x86_64' in d or 'i386' in d):
architectures.append(d);
output_path = create_arch_framework(sdk_path, d);
if ('armv7' in d or 'armv7s' in d or 'arm64' in d):
thin_sdks.append(output_path);
# create sdk file
# os.remove(sdk_path);
str = ' '.join(thin_sdks)
created_file_path=sdk_path
cmd = u"lipo -create %s -output %s" % (str, created_file_path)
cmpsplit = cmd.split()
try:
output = subprocess.check_output(cmpsplit)
except subprocess.CalledProcessError, e:
print '%s error:%s!' % (cmd, e)
# delete
time.sleep(2)
for file_temp in thin_sdks:
if(os.path.exists(file_temp)):
# os.remove(file_temp)
pass
pass
def get_file_path(sdk_path,rename_name):
root_path = os.path.dirname(sdk_path);
output_sdk_path = ''
if (os.path.isfile(sdk_path)):
array_result= sdk_path.split('/');
framework_path= array_result[len(array_result)-2];
framework_sdk_path=array_result[len(array_result)-1];
if '.a' in framework_sdk_path:
            # It is a .a file
temp_sdk_path = os.path.split(sdk_path)
file_name = temp_sdk_path[1];
file_name_ext = file_name.split('.');
file_name = file_name_ext[0] + '_%s' % (rename_name);
new_file_name = '%s/%s.%s' % (root_path, file_name, file_name_ext[1]);
output_sdk_path = new_file_name;
else:
# framework_root_path=os.path.abspath(os.path.dirname(sdk_path) + os.path.sep + "..")
framework_root_path = os.path.dirname(sdk_path);
output_sdk_path=os.path.join(framework_root_path,'%s_%s' %(framework_sdk_path,rename_name));
            # It is a .framework file
pass
return output_sdk_path;
def create_arch_framework(sdk_path,arch):
    # Example:
    # lipo /Users/star.liao/Desktop/InterstitialAd_branch_1.8.0/SDK/AggregationAdThirdSDKs/Baidu/4.5/BaiduMobAdSDK.framework/BaiduMobAdSDK \
    #     -thin armv7s -output /Users/star.liao/Desktop/InterstitialAd_branch_1.8.0/SDK/AggregationAdThirdSDKs/Baidu/BaiduMobAdSDK_7s
output_sdk_path = get_file_path(sdk_path,arch);
cmd = u"lipo %s -thin %s -output %s" %(sdk_path,arch,output_sdk_path)
cmpsplit = cmd.split()
try:
output = subprocess.check_output(cmpsplit)
except subprocess.CalledProcessError, e:
print '%s error:%s!' %(cmd,e)
pass
return output_sdk_path;
def getFileName(path):
    ''' Get all file names with the given suffix under the specified directory '''
# print f_list
    for root, dirs, files in os.walk(path):
        for name in files:
            # os.path.splitext(): split a file name into (stem, extension)
            if os.path.splitext(name)[1] == '.a':
                print os.path.join(root, name)
if __name__ == '__main__':
paras_len= len(sys.argv)
if paras_len>=1:
source_file_path = sys.argv[1]
print 'source_file_path:',source_file_path
result_file_path = get_sdk_infos(source_file_path);
print 'success!'
pass
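# Usage sketch (Python 2): `python SDK_Thinning.py /path/to/libFoo.a`
# The script splits the binary into per-architecture slices with `lipo -thin`,
# keeps only the device slices (armv7/armv7s/arm64), and merges them back with
# `lipo -create`, overwriting the original file.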
``` |
{
"source": "12Tech/proxytea",
"score": 2
} |
#### File: proxy/forward/app.py
```python
import io
import json
import os
from uuid import uuid4
import boto3
sqs = boto3.client('sqs')
s3_client = boto3.client('s3')
queue_url = os.getenv('SQS_URL')
bucket_name = os.getenv('BUCKET_NAME')
prefix = os.getenv('BODY_PREFIX', 'proxytea')
def dump_body(body: str) -> str:
body_uuid = str(uuid4())
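    # Shard objects by the first two characters of the UUID; this keeps keys
    # spread across S3 prefixes (an assumption about the intent).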
object_prefix = body_uuid[:2]
object_key = f"{prefix}/{object_prefix}/{body_uuid}"
s3_client.put_object(
Body=body.encode('utf-8'),
Bucket=bucket_name,
Key=object_key
)
return f"s3://{bucket_name}/{object_key}"
def lambda_handler(event, context):
dedup = str(uuid4())
body = event.get('body')
if body is not None:
event['body'] = dump_body(body)
sqs.send_message(
QueueUrl=queue_url,
MessageBody=json.dumps(event),
MessageGroupId=dedup
)
return {
"statusCode": 200,
"body": json.dumps(event, indent=4)
}
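# Local smoke test (hypothetical; requires SQS_URL and BUCKET_NAME to be set
# and valid AWS credentials):
#   event = {"httpMethod": "POST", "path": "/hook", "body": "hello"}
#   print(lambda_handler(event, None))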
``` |
{
"source": "12tqian/verification-helper-1",
"score": 2
} |
#### File: onlinejudge_verify/languages/list.py
```python
import pathlib
from logging import getLogger
from typing import *
from onlinejudge_verify.config import get_config
from onlinejudge_verify.languages.cplusplus import CPlusPlusLanguage
from onlinejudge_verify.languages.csharpscript import CSharpScriptLanguage
from onlinejudge_verify.languages.go import GoLanguage
from onlinejudge_verify.languages.haskell import HaskellLanguage
from onlinejudge_verify.languages.java import JavaLanguage
from onlinejudge_verify.languages.models import Language
from onlinejudge_verify.languages.nim import NimLanguage
from onlinejudge_verify.languages.python import PythonLanguage
from onlinejudge_verify.languages.ruby import RubyLanguage
from onlinejudge_verify.languages.rust import RustLanguage
from onlinejudge_verify.languages.user_defined import UserDefinedLanguage
logger = getLogger(__name__)
_dict: Optional[Dict[str, Language]] = None
def _get_dict() -> Dict[str, Language]:
global _dict # pylint: disable=invalid-name
if _dict is None:
_dict = {}
_dict['.cpp'] = CPlusPlusLanguage()
_dict['.hpp'] = _dict['.cpp']
_dict['.cc'] = _dict['.cpp']
_dict['.h'] = _dict['.cpp']
_dict['.csx'] = CSharpScriptLanguage()
_dict['.nim'] = NimLanguage()
_dict['.py'] = PythonLanguage()
_dict['.hs'] = HaskellLanguage()
        _dict['.rb'] = RubyLanguage()
_dict['.go'] = GoLanguage()
_dict['.java'] = JavaLanguage()
_dict['.rs'] = RustLanguage()
for ext, config in get_config().get('languages', {}).items():
if '.' + ext in _dict:
if not isinstance(_dict['.' + ext], UserDefinedLanguage):
for key in ('compile', 'execute', 'bundle', 'list_attributes', 'list_dependencies'):
if key in config:
raise RuntimeError("You cannot overwrite existing language: .{}".format(ext))
else:
logger.warning("config.toml: languages.%s: Adding new languages using `config.toml` is supported but not recommended. Please consider making pull requests for your languages, see https://github.com/kmyk/online-judge-verify-helper/issues/116", ext)
_dict['.' + ext] = UserDefinedLanguage(extension=ext, config=config)
return _dict
def get(path: pathlib.Path) -> Optional[Language]:
return _get_dict().get(path.suffix)
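# Usage sketch (hypothetical): resolve the language handler for a file path.
#   lang = get(pathlib.Path('src/segtree.hpp'))  # -> the shared CPlusPlusLanguage
#   lang = get(pathlib.Path('main.unknown'))     # -> None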
```
#### File: onlinejudge_verify/languages/rust.py
```python
import abc
import enum
import functools
import itertools
import json
import pathlib
import shutil
import subprocess
from collections import defaultdict
from enum import Enum
from logging import getLogger
from subprocess import PIPE
from typing import *
from onlinejudge_verify.config import get_config
from onlinejudge_verify.languages import special_comments
from onlinejudge_verify.languages.models import Language, LanguageEnvironment
logger = getLogger(__name__)
_metadata_by_manifest_path: Dict[pathlib.Path, Dict[str, Any]] = {}
_cargo_checked_workspaces: Set[pathlib.Path] = set()
_related_source_files_by_workspace: Dict[pathlib.Path, Dict[pathlib.Path, FrozenSet[pathlib.Path]]] = {}
class _ListDependenciesBackend:
@abc.abstractmethod
def list_dependencies(self, path: pathlib.Path, *, basedir: pathlib.Path) -> List[pathlib.Path]:
raise NotImplementedError
class _NoBackend(_ListDependenciesBackend):
def list_dependencies(self, path: pathlib.Path, *, basedir: pathlib.Path) -> List[pathlib.Path]:
return _list_dependencies_by_crate(path, basedir=basedir, cargo_udeps_toolchain=None)
class _CargoUdeps(_ListDependenciesBackend):
toolchain: str = 'nightly'
def __init__(self, *, toolchain: Optional[str]):
if toolchain is not None:
self.toolchain = toolchain
def list_dependencies(self, path: pathlib.Path, *, basedir: pathlib.Path) -> List[pathlib.Path]:
return _list_dependencies_by_crate(path, basedir=basedir, cargo_udeps_toolchain=self.toolchain)
@functools.lru_cache(maxsize=None)
def _list_dependencies_by_crate(path: pathlib.Path, *, basedir: pathlib.Path, cargo_udeps_toolchain: Optional[str]) -> List[pathlib.Path]:
"""The `list_dependencies` implementation for `_NoBackend` and `CargoUdeps`.
:param path: A parameter in `Language.list_dependencies`.
:param basedir: A parameter in `Language.list_dependencies`.
:param cargo_udeps_toolchain: A Rust toolchain name for cargo-udeps. If it is `None`, we don't run cargo-udeps.
:returns: Paths to the `.rs` files for `Language.list_dependencies`.
"""
path = basedir / path
# We regard that a generated file does not depend on any files.
for parent in path.parents:
if (parent.parent / 'Cargo.toml').exists() and parent.parts[-1] == 'target':
logger.warning('This is a generated file!: %s', path)
return [path]
metadata = _cargo_metadata(cwd=path.parent)
# First, collects source files in the same crate.
common_result = set(_source_files_in_same_targets(path, _related_source_files(basedir, metadata)))
main_package_and_target = _find_target(metadata, path)
if not main_package_and_target:
return sorted(common_result)
main_package, main_target = main_package_and_target
packages_by_id = {p['id']: p for p in metadata['packages']}
class DependencyNamespace(Enum):
NORMAL_DEVELOPMENT = enum.auto()
BUILD = enum.auto()
@classmethod
def from_dep_kind(cls, kind: str):
if kind == 'build':
return cls.BUILD
return cls.NORMAL_DEVELOPMENT
# Collect the `(|dev-|build-)dependencies` into a <is a `build-dependency`> → (<"extern crate name"> → <package>) dictionary.
dependencies: DefaultDict[DependencyNamespace, Dict[str, Dict[str, Any]]] = defaultdict(dict)
for dep in next(n['deps'] for n in metadata['resolve']['nodes'] if n['id'] == main_package['id']):
if _need_dev_deps(main_target) or any(k['kind'] is None for k in dep['dep_kinds']):
dependencies[DependencyNamespace.NORMAL_DEVELOPMENT][dep['name']] = packages_by_id[dep['pkg']]
if any(k['kind'] == 'build' for k in dep['dep_kinds']):
dependencies[DependencyNamespace.BUILD][dep['name']] = packages_by_id[dep['pkg']]
# If `cargo_udeps_toolchain` is present, collects packages that are "unused" by `target`.
unused_packages = defaultdict(set)
if cargo_udeps_toolchain is not None:
explicit_names_in_toml = {(DependencyNamespace.from_dep_kind(d['kind']), d['rename']) for d in main_package['dependencies'] if d['rename']}
if not shutil.which('cargo-udeps'):
raise RuntimeError('`cargo-udeps` not in $PATH')
unused_deps = json.loads(subprocess.run(
['rustup', 'run', cargo_udeps_toolchain, 'cargo', 'udeps', '--output', 'json', '--manifest-path', main_package['manifest_path'], *_target_option(main_target)],
cwd=metadata['workspace_root'],
check=False,
stdout=PIPE,
).stdout.decode())['unused_deps'].values()
unused_dep = next((u for u in unused_deps if u['manifest_path'] == main_package['manifest_path']), None)
if unused_dep:
names_in_toml = [(DependencyNamespace.NORMAL_DEVELOPMENT, name_in_toml) for name_in_toml in [*unused_dep['normal'], *unused_dep['development']]]
names_in_toml.extend((DependencyNamespace.BUILD, name_in_toml) for name_in_toml in unused_dep['build'])
for dependency_namespace, name_in_toml in names_in_toml:
if (dependency_namespace, name_in_toml) in explicit_names_in_toml:
# If the `name_in_toml` is explicitly renamed one, it equals to the `extern_crate_name`.
unused_package = dependencies[dependency_namespace][name_in_toml]['id']
else:
# Otherwise, it equals to the `package.name`.
unused_package = next(p['id'] for p in dependencies[dependency_namespace].values() if p['name'] == name_in_toml)
unused_packages[dependency_namespace].add(unused_package)
# Finally, adds source files related to the depended crates except:
#
# - those detected by cargo-udeps
# - those come from Crates.io or Git repositories (e.g. `proconio`, other people's libraries including `ac-library-rs`)
# `main_package` should always be included.
# Note that cargo-udeps does not detect it if it is unused.
# https://github.com/est31/cargo-udeps/pull/35
depended_packages = [main_package]
for dependency_namespace, values in dependencies.items():
for depended_package in values.values():
if depended_package['id'] not in unused_packages[dependency_namespace] and not depended_package['source']:
depended_packages.append(depended_package)
ret = common_result
for depended_package in depended_packages:
depended_targets = [t for t in depended_package['targets'] if t != main_target and (_is_build(t) or _is_lib_or_proc_macro(t))]
assert len(depended_targets) <= 2
for depended_target in depended_targets:
related_source_files = _related_source_files(basedir, _cargo_metadata_by_manifest_path(pathlib.Path(depended_package["manifest_path"])))
ret |= _source_files_in_same_targets(pathlib.Path(depended_target['src_path']).resolve(strict=True), related_source_files)
return sorted(ret)
def _related_source_files(basedir: pathlib.Path, metadata: Dict[str, Any]) -> Dict[pathlib.Path, FrozenSet[pathlib.Path]]:
"""Collects all of the `.rs` files recognized by a workspace.
:param basedir: A parameter from `Language.list_dependencies`.
:param metadata: Output of `cargo metadata`
:returns: A (main source file) → (other related files) map
"""
if pathlib.Path(metadata['workspace_root']) in _related_source_files_by_workspace:
return _related_source_files_by_workspace[pathlib.Path(metadata['workspace_root'])]
# Runs `cargo check` to generate `$target_directory/debug/deps/*.d`.
if pathlib.Path(metadata['workspace_root']) not in _cargo_checked_workspaces:
subprocess.run(
['cargo', 'check', '--manifest-path', str(pathlib.Path(metadata['workspace_root'], 'Cargo.toml')), '--workspace', '--all-targets'],
cwd=metadata['workspace_root'],
check=True,
)
_cargo_checked_workspaces.add(pathlib.Path(metadata['workspace_root']))
ret: Dict[pathlib.Path, FrozenSet[pathlib.Path]] = dict()
targets_in_workspace = itertools.chain.from_iterable(p['targets'] for p in metadata['packages'] if p['id'] in metadata['workspace_members'])
for target in targets_in_workspace:
# Finds the **latest** "dep-info" file that contains a line in the following format, and parses the line.
#
# ```
    # <relative/absolute path to the `.d` file itself>: <relative/absolute path to the root source file> <relative/absolute paths to the other related files>...
# ```
#
# - https://github.com/rust-lang/cargo/blob/rust-1.49.0/src/cargo/core/compiler/fingerprint.rs#L1979-L1997
# - https://github.com/rust-lang/cargo/blob/rust-1.49.0/src/cargo/core/compiler/fingerprint.rs#L1824-L1830
if _is_build(target):
dep_info_paths = pathlib.Path(metadata['target_directory'], 'debug', 'build').rglob(f'{_crate_name(target)}-*.d')
elif _is_example(target):
dep_info_paths = pathlib.Path(metadata['target_directory'], 'debug', 'examples').glob(f'{_crate_name(target)}-*.d')
else:
dep_info_paths = pathlib.Path(metadata['target_directory'], 'debug', 'deps').glob(f'{_crate_name(target)}-*.d')
for dep_info_path in sorted(dep_info_paths, key=lambda p: p.stat().st_mtime_ns, reverse=True):
with open(dep_info_path) as file:
dep_info = file.read()
for line in dep_info.splitlines():
ss = line.split(': ')
if len(ss) == 2 and pathlib.Path(metadata['workspace_root'], ss[0]) == dep_info_path:
paths = []
it = iter(ss[1].split())
for s in it:
while s.endswith('\\'):
s = s.rstrip('\\')
s += ' '
s += next(it)
path = pathlib.Path(metadata['workspace_root'], s).resolve(strict=True)
# Ignores paths that don't start with the `basedir`. (e.g. `/dev/null`, `/usr/local/share/foo/bar`)
try:
# `PurePath.is_relative_to` is since Python 3.9.
_ = path.relative_to(basedir)
paths.append(path)
except ValueError:
pass
if paths[:1] == [pathlib.Path(target['src_path']).resolve(strict=True)]:
ret[paths[0]] = frozenset(paths[1:])
break
else:
continue
break
else:
logger.error('no `.d` file for `%s`', target["name"])
_related_source_files_by_workspace[pathlib.Path(metadata['workspace_root'])] = ret
return ret
def _source_files_in_same_targets(path: pathlib.Path, related_source_files: Dict[pathlib.Path, FrozenSet[pathlib.Path]]) -> FrozenSet[pathlib.Path]:
"""Returns `.rs` file paths relating to `path`.
:param path: Path to a `.rs` file
:param related_source_files: Output of `_related_source_files`
:returns: Relating `.rs` file paths
"""
# If `p` is `src_path` of a target, it does not belong to any other target unless it's weirdly symlinked,
if path in related_source_files:
return frozenset({path, *related_source_files[path]})
# Otherwise, it may be used by multiple targets with `#[path = ".."] mod foo;` or something.
return frozenset(itertools.chain.from_iterable({k, *v} for (k, v) in related_source_files.items() if path in v)) or frozenset({path})
class RustLanguageEnvironment(LanguageEnvironment):
def compile(self, path: pathlib.Path, *, basedir: pathlib.Path, tempdir: pathlib.Path) -> None:
path = basedir / path
metadata = _cargo_metadata(cwd=path.parent)
target = _ensure_target(metadata, path)
subprocess.run(
['cargo', 'build', '--release', *_target_option(target)],
cwd=path.parent,
check=True,
)
def get_execute_command(self, path: pathlib.Path, *, basedir: pathlib.Path, tempdir: pathlib.Path) -> List[str]:
path = basedir / path
metadata = _cargo_metadata(cwd=path.parent)
target = _ensure_target(metadata, path)
return [str(pathlib.Path(metadata['target_directory'], 'release', *([] if _is_bin(target) else ['examples']), target['name']))]
class RustLanguage(Language):
_list_dependencies_backend: _ListDependenciesBackend
def __init__(self, *, config: Optional[Dict[str, Any]] = None):
if config is None:
config = get_config().get('languages', {}).get('rust', {})
# Parses `languages.rust.list_dependencies_backend`.
if 'list_dependencies_backend' in config:
list_dependencies_backend = config['list_dependencies_backend']
if not isinstance(list_dependencies_backend, dict):
raise RuntimeError('`languages.rust.list_dependencies_backend` must be `dict`')
if 'kind' not in list_dependencies_backend:
raise RuntimeError('missing `languages.rust.list_dependencies_backend.kind`')
list_dependencies_backend_kind = list_dependencies_backend['kind']
if not isinstance(list_dependencies_backend_kind, str):
raise RuntimeError('`languages.rust.list_dependencies_backend.kind` must be `str`')
if list_dependencies_backend_kind == 'none':
self._list_dependencies_backend = _NoBackend()
elif list_dependencies_backend_kind == 'cargo-udeps':
if 'toolchain' not in list_dependencies_backend:
toolchain = None
elif isinstance(list_dependencies_backend['toolchain'], str):
toolchain = list_dependencies_backend['toolchain']
else:
raise RuntimeError('`languages.rust.list_dependencies_backend.toolchain` must be `str`')
self._list_dependencies_backend = _CargoUdeps(toolchain=toolchain)
else:
raise RuntimeError("expected 'none' or 'cargo-udeps' for `languages.rust.list_dependencies_backend.kind`")
else:
self._list_dependencies_backend = _NoBackend()
def list_dependencies(self, path: pathlib.Path, *, basedir: pathlib.Path) -> List[pathlib.Path]:
return self._list_dependencies_backend.list_dependencies(path, basedir=basedir)
def bundle(self, path: pathlib.Path, *, basedir: pathlib.Path, options: Dict[str, Any]) -> bytes:
raise NotImplementedError
def is_verification_file(self, path: pathlib.Path, *, basedir: pathlib.Path) -> bool:
path = basedir / path
metadata = _cargo_metadata(cwd=path.parent)
package_and_target = _find_target(metadata, path)
if not package_and_target:
return False
_, target = package_and_target
return _is_bin_or_example_bin(target) and 'PROBLEM' in special_comments.list_special_comments(path)
def list_environments(self, path: pathlib.Path, *, basedir: pathlib.Path) -> Sequence[RustLanguageEnvironment]:
return [RustLanguageEnvironment()]
def _cargo_metadata(cwd: pathlib.Path) -> Dict[str, Any]:
"""Returns "metadata" for a Cargo.toml file in `cwd` or its parent directories.
:raises ValueError: if `cwd` is not absolute or contains `..`
:returns: Output of `cargo metadata` command
"""
if not cwd.is_absolute() or '..' in cwd.parts:
raise ValueError(f'the `cwd` parameter must be absolute and must not contain `..`: {cwd}')
# https://docs.rs/cargo/0.49.0/src/cargo/util/important_paths.rs.html#6-20
for directory in [cwd, *cwd.parents]:
manifest_path = directory / 'Cargo.toml'
if manifest_path.exists():
return _cargo_metadata_by_manifest_path(manifest_path)
raise RuntimeError(f'could not find `Cargo.toml` in `{cwd}` or any parent directory')
def _cargo_metadata_by_manifest_path(manifest_path: pathlib.Path) -> Dict[str, Any]:
"""Returns "metadata" for a certain `Cargo.toml`.
:returns: Output of `cargo metadata` command
"""
if manifest_path in _metadata_by_manifest_path:
return _metadata_by_manifest_path[manifest_path]
metadata = _run_cargo_metadata(manifest_path)
root_manifest_path = pathlib.Path(metadata['workspace_root'], 'Cargo.toml')
if root_manifest_path != manifest_path:
metadata = _run_cargo_metadata(root_manifest_path)
for key in [root_manifest_path, *(pathlib.Path(p['manifest_path']) for p in metadata['packages'] if p['id'] in metadata['workspace_members'])]:
_metadata_by_manifest_path[key] = metadata
return metadata
def _run_cargo_metadata(manifest_path: pathlib.Path) -> Dict[str, Any]:
"""Runs `cargo metadata` for a certain `Cargo.toml`.
This function is considered to be executed just once for every Cargo.toml in the repository.
For detailed information about `cargo metadata`, see:
- <https://doc.rust-lang.org/cargo/commands/cargo-metadata.html#output-format>
- <https://docs.rs/cargo_metadata>
:param manifest_path: Path to a `Cargo.toml`
:returns: Output of `cargo metadata` command
"""
return json.loads(subprocess.run(
['cargo', 'metadata', '--format-version', '1', '--manifest-path', str(manifest_path)],
stdout=PIPE,
cwd=manifest_path.parent,
check=True,
).stdout.decode())
def _find_target(
metadata: Dict[str, Any],
src_path: pathlib.Path,
) -> Optional[Tuple[Dict[str, Any], Dict[str, Any]]]:
for package in metadata['packages']:
for target in package['targets']:
# A `src_path` may contain `..`
# The path may not actually exist by being excluded from the package.
if pathlib.Path(target['src_path']).resolve() == src_path:
return package, target
return None
def _ensure_target(metadata: Dict[str, Any], src_path: pathlib.Path) -> Dict[str, Any]:
package_and_target = _find_target(metadata, src_path)
if not package_and_target:
raise RuntimeError(f'{src_path} is not a main source file of any target')
_, target = package_and_target
return target
def _crate_name(target: Dict[str, Any]) -> str:
return target['name'].replace('-', '_')
def _is_build(target: Dict[str, Any]) -> bool:
return target['kind'] == ['custom-build']
def _is_lib_or_proc_macro(target: Dict[str, Any]) -> bool:
return target['kind'] in [['lib'], ['proc-macro']]
def _is_bin(target: Dict[str, Any]) -> bool:
return target['kind'] == ['bin']
def _is_example(target: Dict[str, Any]) -> bool:
return target['kind'] == ['example']
def _is_bin_or_example_bin(target: Dict[str, Any]) -> bool:
return _is_bin(target) or _is_example(target) and target['crate_types'] == ['bin']
def _need_dev_deps(target: Dict[str, Any]) -> bool:
# Comes from https://docs.rs/cargo/0.49.0/cargo/ops/enum.CompileFilter.html#method.need_dev_deps
return not (_is_lib_or_proc_macro(target) or _is_bin(target))
def _target_option(target: Dict[str, Any]) -> List[str]:
if target['kind'] == ['bin']:
return ['--bin', target['name']]
if target['kind'] == ['example']:
return ['--example', target['name']]
if target['kind'] == ['test']:
return ['--test', target['name']]
if target['kind'] == ['bench']:
return ['--bench', target['name']]
return ['--lib']
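# Usage sketch (hypothetical; run inside a Cargo workspace):
#   lang = RustLanguage()
#   deps = lang.list_dependencies(pathlib.Path('examples/aplusb.rs'),
#                                 basedir=pathlib.Path('/path/to/repo'))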
```
#### File: onlinejudge_verify/online_submission/judges.py
```python
from onlinejudge_verify.online_submission.submissions import *
from requests import session
import requests
import mechanize
from selenium import webdriver
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.action_chains import ActionChains
import time
import json
import traceback
class VJudge:
JUDGE_NAME = "vjudge"
JUDGE_URL = "https://vjudge.net/"
PROBLEM_URL = "https://vjudge.net/problem/"
GOOD_VERDICTS = ["Accepted"]
BAD_VERDICTS = ["Time", "Wrong", "Compilation", "Runtime", "Memory", "Output", "Presentation", "Compile", "Unknown"]
LANGUAGES = {
"C" : "43", #GNU GCC C11 5.1.0
"java" : "36", # Java 1.8.0_241
"cpp" : "61", #
"C++" : "C++", # C++ 17 64 bit
"py" : "41" #PyPy 3.6 (7.2.0)
}
JUDGE_PREFIX = {
"codeforces" : "CodeForces",
"atcoder" : "AtCoder",
"spoj" : "SPOJ",
"kattis" : "Kattis"
}
JUDGE_MARKER = {
"codeforces.com" : "codeforces",
"atcoder.jp" : "atcoder",
"spoj.com" : "spoj",
"open.kattis.com" : "kattis"
}
JUDGE_LANGUAGE_VALUE = {
'codeforces' : {
'C++' : '61'
},
'atcoder' : {
'C++' : '4003'
},
'kattis' : {
'C++' : 'C++'
},
'spoj' : {
'C++' : '44'
}
}
username: str
password: str
def current_millisecond_time(self):
return round(time.time() * 1000)
def __init__(self, username = "", password = ""):
self.username = username
self.password = password
def get_vjudge_problem_link(self, problem_link):
judge_name = ''
for marker in self.JUDGE_MARKER.keys():
if marker in problem_link:
judge_name = self.JUDGE_MARKER[marker]
break
if judge_name == '':
return None
add = ''
lst = problem_link.split('/')
if (lst[-1] == ''):
lst.pop()
if judge_name == 'codeforces':
if lst[-2] == 'problem':
add = lst[-3] + lst[-1];
else:
add = lst[-2] + lst[-1]
elif judge_name == 'atcoder' or judge_name == 'spoj' or judge_name == 'kattis':
add = lst[-1]
if (judge_name == 'atcoder'):
add = add.split('?')[0] # get rid of language extension
return [judge_name, self.PROBLEM_URL + self.JUDGE_PREFIX[judge_name] + '-' + add]
def submit_solution(self, problem_link, solution):
# Logging in
options = Options()
options.add_argument('--headless')
options.add_argument('--disable-gpu') # Last I checked this was necessary.
driver = webdriver.Chrome(chrome_options=options)
driver.get(self.JUDGE_URL)
wait = WebDriverWait(driver, 10)
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/nav/div/ul/li[8]/a")))
driver.execute_script("arguments[0].click();", element)
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[4]/div/div/div[2]/form/div[1]/input")))
driver.execute_script("arguments[0].value = arguments[1];", element, self.username)
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[4]/div/div/div[2]/form/div[2]/input")))
driver.execute_script("arguments[0].value = arguments[1];", element, self.password)
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[4]/div/div/div[3]/button[3]")))
driver.execute_script("arguments[0].click();", element)
judge_name, submission_url = self.get_vjudge_problem_link(problem_link)
# Submitting Solution
MAX_RETRIES = 5
retries = 0
while retries <= MAX_RETRIES:
try:
driver.get(submission_url)
driver.get(submission_url)
driver.get(submission_url)
# click submit button
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div[1]/button")))
driver.execute_script("arguments[0].click();", element)
# select language
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[3]/div/div/div[2]/form/div/div[4]/div/select")))
value = self.JUDGE_LANGUAGE_VALUE[judge_name][solution.language]
driver.execute_script('''
var select = arguments[0];
for (var i = 0; i < select.options.length; i++) {
if (select.options[i].value == arguments[1]) {
select.options[i].selected = true;
}
}''', element, value);
# insert code
new_code = solution.solution_code + "\n// " + str(self.current_millisecond_time())
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[3]/div/div/div[2]/form/div/div[6]/div/textarea")))
driver.execute_script("arguments[0].value = arguments[1];", element, new_code)
# click submit
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[3]/div/div/div[3]/button[2]")))
driver.execute_script("arguments[0].click();", element)
start = time.time()
# repeat check for result
while True:
try:
text = wait.until(EC.visibility_of_element_located((By.XPATH, "/html/body/div[3]/div/div/div[2]/div[1]/table/tbody/tr[1]/td"))).text
except:
text = ''
text = text.split(' ')[0]
if text in self.GOOD_VERDICTS:
driver.quit()
return True
elif text in self.BAD_VERDICTS:
driver.quit()
return False
time.sleep(0.25)
if time.time() - start >= 120:
break
except:
retries += 1
driver.refresh()
driver.quit()
return False
class Codeforces:
JUDGE_NAME = "codeforces"
JUDGE_URL = "https://codeforces.com/"
LOGIN_URL = "https://codeforces.com/enter"
SUBMISSION_URL = "https://codeforces.com/problemset/submit"
RESULT_URL = "https://codeforces.com/contest/"
LANGUAGES = {
"C" : "43", #GNU GCC C11 5.1.0
"java" : "36", # Java 1.8.0_241
"cpp" : "61", #
"C++" : "61", # C++ 17 64 bit
"py" : "41" #PyPy 3.6 (7.2.0)
}
username: str
password: str
logged_in: bool
br: mechanize.Browser
def __init__(self, username = "", password = ""):
self.username = username
self.password = password
self.logged_in = False
self.br = mechanize.Browser()
# Browser options
self.br.set_handle_equiv(True)
self.br.set_handle_gzip(True)
self.br.set_handle_redirect(True)
self.br.set_handle_referer(True)
self.br.set_handle_robots(False)
self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time = 1)
self.br.addheaders = [('User-agent', 'Chrome')]
def login(self):
# print("Trying to log into CodeForces: " + self.username)
# The site we will navigate into, handling it's session
self.br.open(self.LOGIN_URL)
# Select the second (index one) form (the first form is a search query box)
# Logging in
self.br.select_form(nr = 1)
self.logged_in = True
self.br.form['handleOrEmail'] = self.username
self.br.form['password'] = <PASSWORD>
res = self.br.submit()
if res.geturl() == self.JUDGE_URL:
# print("Logged In Successfully")
return True
else:
# print("CF: Sorry, wrong username/password. Please try again.")
self.logged_in = False
return False
def get_contest_number(self, problem_id):
res = ''
for i in range(len(problem_id)):
if problem_id[i].isdigit():
res += problem_id[i]
else:
break
return int(res)
def current_millisecond_time(self):
return round(time.time() * 1000)
def submit_solution(self, problem, solution):
if not self.logged_in:
self.login()
self.br.open(self.SUBMISSION_URL)
self.br.select_form(nr = 1)
self.br.form.find_control(name = "programTypeId").value = [self.LANGUAGES[solution.language]]
self.br.form.find_control(name = "submittedProblemCode").value = str(problem.problem_id)
self.br.form.find_control(name = "source").value = solution.solution_code + "\n// " + str(self.current_millisecond_time())
res = self.br.submit()
if "https://codeforces.com/problemset/status?my=on" != str(res.geturl()):
return ""
# then we should check if the verdict has been given
# should check repeatedly delaying 5-10 secs and stop when a verdict is given
time.sleep(0.25)
response = requests.get("https://codeforces.com/api/user.status?handle=" + self.username + "&from=1&count=1")
submission_id = ""
if response.status_code == 200:
try:
data = json.loads(response.content.decode('utf-8'))
except:
return False
submission_id = data['result'][0]['id']
else:
# print("Could't get submission id. Please try this problem again later.")
return False
submission_url = self.RESULT_URL + str(self.get_contest_number(problem.problem_id)) + '/submission/' + str(submission_id)
# wait for result
start = time.time()
time.sleep(0.25)
# print(submission_url)
while True:
if (time.time() - start >= 60):
break
response = requests.get("https://codeforces.com/api/user.status?handle=" + self.username + "&from=1&count=1")
try:
data = json.loads(response.content.decode('utf-8'))
except:
time.sleep(0.5)
continue
data = data['result'][0]
if 'verdict' not in data.keys():
continue
try:
verdict = str(data['verdict'])
except:
traceback.print_exc()
# print(data)
return False
if verdict == "TESTING":
time.sleep(0.25)
else:
if verdict == "OK":
return True
else:
return False
break
return False
```
#### File: onlinejudge_verify/online_submission/test.py
```python
from judges import *
from submissions import *
import requests
import time
def current_milli_time():
return round(time.time() * 1000)
judge = Codeforces('vhelperoj', '<PASSWORD>')
problem = Problem('codeforces', '4A')
solution = Solution('C++', '''#include <bits/stdc++.h>
using namespace std;
int main() {
ios::sync_with_stdio(false);
cin.tie(nullptr);
int n;
cin >> n;
if (n > 2 && n % 2 == 0) {
cout << "YES" << '\\n';
} else {
cout << "NO" << '\\n';
}
return 0;
}''')
print(judge.submit_solution(problem, solution))
``` |
{
"source": "12tqian/verification-helper",
"score": 3
} |
#### File: onlinejudge_verify/languages/python.py
```python
import functools
import pathlib
import sys
import textwrap
from logging import getLogger
from typing import Any, Dict, List, Sequence, Tuple
import importlab.environment
import importlab.fs
import importlab.graph
from onlinejudge_verify.languages.models import Language, LanguageEnvironment
logger = getLogger(__name__)
class PythonLanguageEnvironment(LanguageEnvironment):
def compile(self, path: pathlib.Path, *, basedir: pathlib.Path, tempdir: pathlib.Path) -> None:
code = textwrap.dedent(f"""\
#!{sys.executable}
\"\"\"This is a helper script to run the target Python code.
We need this script to set PYTHONPATH portably. The env command, quoting something, etc. are not portable or difficult to implement.
\"\"\"
import os
import sys
# arguments
path = {repr(str(path.resolve()))}
basedir = {repr(str(basedir.resolve()))}
# run {str(path)}
env = dict(os.environ)
if "PYTHONPATH" in env:
env["PYTHONPATH"] = basedir + os.pathsep + env["PYTHONPATH"]
else:
env["PYTHONPATH"] = basedir # set `PYTHONPATH` to import files relative to the root directory
os.execve(sys.executable, [sys.executable, path], env=env) # use `os.execve` to avoid making an unnecessary parent process
""")
with open(tempdir / 'compiled.py', 'wb') as fh:
fh.write(code.encode())
def get_execute_command(self, path: pathlib.Path, *, basedir: pathlib.Path, tempdir: pathlib.Path) -> List[str]:
return [sys.executable, str(tempdir / 'compiled.py')]
@functools.lru_cache(maxsize=None)
def _python_list_depending_files(path: pathlib.Path, basedir: pathlib.Path) -> List[pathlib.Path]:
# compute the dependency graph of the `path`
env = importlab.environment.Environment(
importlab.fs.Path([importlab.fs.OSFileSystem(str(basedir.resolve()))]),
(sys.version_info.major, sys.version_info.minor),
)
res_graph = importlab.graph.ImportGraph.create(env, [str(path)])
try:
node_deps_pairs = res_graph.deps_list() # type: List[Tuple[str, List[str]]]
except Exception as e:
raise RuntimeError(f"Failed to analyze the dependency graph (circular imports?): {path}") from e
logger.debug('the dependency graph of %s: %s', str(path), node_deps_pairs)
# collect Python files which are depended by the `path` and under `basedir`
res_deps = [] # type: List[pathlib.Path]
res_deps.append(path.resolve())
for node_, deps_ in node_deps_pairs:
node = pathlib.Path(node_)
deps = list(map(pathlib.Path, deps_))
if node.resolve() == path.resolve():
for dep in deps:
if basedir.resolve() in dep.resolve().parents:
res_deps.append(dep.resolve())
break
return list(set(res_deps))
class PythonLanguage(Language):
def list_dependencies(self, path: pathlib.Path, *, basedir: pathlib.Path) -> List[pathlib.Path]:
return _python_list_depending_files(path.resolve(), basedir)
def bundle(self, path: pathlib.Path, *, basedir: pathlib.Path, options: Dict[str, Any]) -> bytes:
"""
:throws NotImplementedError:
"""
raise NotImplementedError
def is_verification_file(self, path: pathlib.Path, *, basedir: pathlib.Path) -> bool:
return '.test.py' in path.name
def list_environments(self, path: pathlib.Path, *, basedir: pathlib.Path) -> Sequence[PythonLanguageEnvironment]:
# TODO add another environment (e.g. pypy)
return [PythonLanguageEnvironment()]
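# Usage sketch (hypothetical):
#   lang = PythonLanguage()
#   lang.is_verification_file(pathlib.Path('tests/a.test.py'), basedir=pathlib.Path.cwd())  # -> True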
``` |
{
"source": "12urenloop/Ronny-the-station-chef",
"score": 3
} |
#### File: Ronny-the-station-chef/database/schemas.py
```python
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel, Field, validator
class Detection(BaseModel):
id: int
mac: str
rssi: int
baton_uptime_ms: int = Field(alias="uptime_ms")
battery_percentage: float = Field(alias="battery")
detection_time: float = Field(alias="detection_timestamp")
@validator("*", pre=True)
def convert_time(cls, v) -> float:
if isinstance(v, datetime):
return v.timestamp()
return v
class Config:
orm_mode = True
allow_population_by_field_name = True
class DetectionsResponse(BaseModel):
detections: List[Detection]
station_id: str
class UnixTimeResponse(BaseModel):
timestamp: int
class LastDetectionResponse(BaseModel):
detection: Optional[Detection]
station_id: str
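# Serialization sketch (hypothetical): with orm_mode enabled, an ORM row
# converts via
#   Detection.from_orm(row).dict(by_alias=True)
# which yields the aliased keys: uptime_ms, battery, detection_timestamp.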
```
#### File: 12urenloop/Ronny-the-station-chef/ronny.py
```python
import struct
import time
import logging
from datetime import datetime
from database.models import Base, Detection
from database.database import SessionLocal, engine
from sqlalchemy.orm import Session
from scapy.layers.bluetooth import (
HCI_LE_Meta_Advertising_Reports,
EIR_Manufacturer_Specific_Data,
BluetoothHCISocket,
HCI_Hdr,
HCI_Command_Hdr,
HCI_Cmd_LE_Set_Scan_Parameters,
HCI_Cmd_LE_Set_Scan_Enable,
)
logging.basicConfig(
format="[%(levelname)s][%(asctime)s] %(message)s",
datefmt="%m/%d/%Y %I:%M:%S %p",
level=logging.INFO,
)
Base.metadata.create_all(bind=engine)
db: Session = SessionLocal()
zeus_mac_prefix = "5a:45:55:53"
def packet_callback(packet):
try:
do_commit = False
for report in packet[HCI_LE_Meta_Advertising_Reports].reports:
if report.addr.startswith(zeus_mac_prefix):
mac = str(report.addr).lower()
if EIR_Manufacturer_Specific_Data not in packet:
logging.warning(f"No manufacturer information {mac}")
continue
content = bytes(packet[EIR_Manufacturer_Specific_Data].payload)
if len(content) == 23:
logging.warning(f"Skipping old baton {mac}")
continue
elif len(content) != 9:
logging.error(f"Fake baton {mac} {len(content)} {content.hex()}")
continue
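            # Payload layout (9 bytes, big-endian): an 8-byte unsigned uptime
            # in milliseconds followed by a 1-byte battery percentage (">QB").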
uptime_ms, battery_percentage = struct.unpack(">QB", content)
rssi = int(report.rssi)
detection: Detection = Detection(
detection_time=datetime.now(),
mac=mac,
rssi=rssi,
baton_uptime_ms=uptime_ms,
battery_percentage=battery_percentage,
)
db.add(detection)
do_commit = True
if do_commit:
db.commit()
except Exception as e:
logging.critical(e, exc_info=True)
bt = BluetoothHCISocket(0)
bt.sr(HCI_Hdr() / HCI_Command_Hdr() / HCI_Cmd_LE_Set_Scan_Parameters(type=0))
bt.sr(
HCI_Hdr()
/ HCI_Command_Hdr()
/ HCI_Cmd_LE_Set_Scan_Enable(enable=True, filter_dups=False)
)
bt.sniff(
lfilter=lambda p: HCI_LE_Meta_Advertising_Reports in p,
store=False,
prn=packet_callback,
)
``` |
{
"source": "12wang3/mllp",
"score": 3
} |
#### File: mllp/mllp/utils.py
```python
import numpy as np
import pandas as pd
from sklearn import preprocessing
from sklearn.impute import SimpleImputer
from mllp.discretizer import MinimalEntropyDiscretizer
def read_info(info_path):
with open(info_path) as f:
f_list = []
for line in f:
tokens = line.strip().split()
f_list.append(tokens)
return f_list[:-1], int(f_list[-1][-1])
def read_csv(data_path, info_path, shuffle=False):
D = pd.read_csv(data_path, header=None)
if shuffle:
D = D.sample(frac=1, random_state=0).reset_index(drop=True)
f_list, label_pos = read_info(info_path)
f_df = pd.DataFrame(f_list)
D.columns = f_df.iloc[:, 0]
y_df = D.iloc[:, [label_pos]]
X_df = D.drop(D.columns[label_pos], axis=1)
f_df = f_df.drop(f_df.index[label_pos])
return X_df, y_df, f_df, label_pos
class DBEncoder:
"""Encoder used for data discretization and binarization."""
def __init__(self, f_df, discrete=False):
self.f_df = f_df
self.discrete = discrete
self.label_enc = preprocessing.OneHotEncoder(categories='auto')
self.me_discretizer = MinimalEntropyDiscretizer()
self.feature_enc = preprocessing.OneHotEncoder(categories='auto')
self.imp = SimpleImputer(missing_values=np.nan, strategy='mean')
self.X_fname = None
self.y_fname = None
def split_data(self, X_df):
discrete_data = X_df[self.f_df.loc[self.f_df[1] == 'discrete', 0]]
continuous_data = X_df[self.f_df.loc[self.f_df[1] == 'continuous', 0]]
if not continuous_data.empty:
continuous_data = continuous_data.replace(to_replace=r'.*\?.*', value=np.nan, regex=True)
            continuous_data = continuous_data.astype(float)
return discrete_data, continuous_data
def fit(self, X_df, y_df):
X_df = X_df.reset_index(drop=True)
y_df = y_df.reset_index(drop=True)
discrete_data, continuous_data = self.split_data(X_df)
self.label_enc.fit(y_df)
self.y_fname = list(self.label_enc.get_feature_names(y_df.columns))
if not continuous_data.empty:
if self.discrete:
self.me_discretizer = MinimalEntropyDiscretizer()
self.me_discretizer.fit(continuous_data, y_df)
isna_df = continuous_data.isna()
continuous_data = self.me_discretizer.transform(continuous_data)
for k in isna_df:
continuous_data.loc[isna_df[k], k] = '?'
discrete_data = pd.concat([discrete_data, continuous_data], axis=1)
else:
                # Use the mean as the missing value for continuous columns if we do not discretize them.
self.imp.fit(continuous_data.values)
if not discrete_data.empty:
# One-hot encoding
self.feature_enc.fit(discrete_data)
feature_names = discrete_data.columns
self.X_fname = list(self.feature_enc.get_feature_names(feature_names))
if not self.discrete:
self.X_fname.extend(continuous_data.columns)
        else:
            self.X_fname = list(continuous_data.columns)
def transform(self, X_df, y_df):
X_df = X_df.reset_index(drop=True)
y_df = y_df.reset_index(drop=True)
discrete_data, continuous_data = self.split_data(X_df)
# Encode string value to int index.
y = self.label_enc.transform(y_df.values.reshape(-1, 1)).toarray()
if not continuous_data.empty:
if self.discrete:
isna_df = continuous_data.isna()
continuous_data = self.me_discretizer.transform(continuous_data)
for k in isna_df:
continuous_data.loc[isna_df[k], k] = '?'
discrete_data = pd.concat([discrete_data, continuous_data], axis=1)
else:
                # Use the mean as the missing value for continuous columns if we do not discretize them.
continuous_data = pd.DataFrame(self.imp.transform(continuous_data.values),
columns=continuous_data.columns)
if not discrete_data.empty:
# One-hot encoding
discrete_data = self.feature_enc.transform(discrete_data)
if not self.discrete:
X_df = pd.concat([pd.DataFrame(discrete_data.toarray()), continuous_data], axis=1)
else:
X_df = pd.DataFrame(discrete_data.toarray())
else:
X_df = continuous_data
return X_df.values, y
class UnionFind:
"""Union-Find algorithm used for merging the identical nodes in MLLP."""
def __init__(self, keys):
self.stu = {}
for k in keys:
self.stu[k] = k
    def find(self, x):
        if x not in self.stu:
            return x
if x != self.stu[x]:
self.stu[x] = self.find(self.stu[x])
return self.stu[x]
def union(self, x, y):
xf = self.find(x)
yf = self.find(y)
if xf != yf:
self.stu[yf] = xf
return True
return False
``` |
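A small usage sketch for the `UnionFind` helper above; the import path mirrors the file location and the keys are invented:
```python
from mllp.utils import UnionFind  # assumes the module above is importable

uf = UnionFind(['a', 'b', 'c', 'd'])
uf.union('a', 'b')                   # True: merges two singleton sets
uf.union('b', 'c')                   # True: 'c' joins the same set
assert uf.find('a') == uf.find('c')  # a, b, c share one representative
assert uf.find('d') != uf.find('a')  # 'd' is still on its own
assert uf.union('a', 'c') is False   # already merged, nothing to do
```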
{
"source": "12-wu/-",
"score": 3
} |
#### File: 12-wu/-/losses.py
```python
from torch.nn.functional import mse_loss
import torch.nn.functional as func
def completion_network_loss(input, output, mask):
#return func.cross_entropy(output * mask, input * mask)
return mse_loss(output * mask, input * mask)
``` |
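A tiny sanity check (a sketch, not repository code) showing that the mask restricts which elements contribute to the loss; the function is restated here so the snippet is self-contained:
```python
import torch
from torch.nn.functional import mse_loss

def completion_network_loss(input, output, mask):  # mirrors the function above
    return mse_loss(output * mask, input * mask)

inp = torch.ones(1, 1, 2, 2)
out = torch.zeros(1, 1, 2, 2)
mask = torch.tensor([[[[1.0, 0.0], [0.0, 0.0]]]])
print(completion_network_loss(inp, out, mask))  # tensor(0.2500): one masked-in cell out of four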
{
"source": "12xiaoni/text-label",
"score": 2
} |
#### File: tests/api/test_comment.py
```python
from rest_framework import status
from rest_framework.reverse import reverse
from .utils import (CRUDMixin, make_comment, make_doc, make_user,
prepare_project)
class TestCommentListDocAPI(CRUDMixin):
@classmethod
def setUpTestData(cls):
cls.project = prepare_project()
cls.non_member = make_user()
doc = make_doc(cls.project.item)
make_comment(doc, cls.project.users[0])
cls.data = {'text': 'example'}
cls.url = reverse(viewname='comment_list_doc', args=[cls.project.item.id, doc.id])
def test_allows_project_member_to_list_comments(self):
for member in self.project.users:
response = self.assert_fetch(member, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
def test_denies_non_project_member_to_list_comments(self):
self.assert_fetch(self.non_member, status.HTTP_403_FORBIDDEN)
def test_denies_unauthenticated_user_to_list_comments(self):
self.assert_fetch(expected=status.HTTP_403_FORBIDDEN)
def test_allows_project_member_to_create_comment(self):
for member in self.project.users:
self.assert_create(member, status.HTTP_201_CREATED)
def test_denies_non_project_member_to_create_comment(self):
self.assert_create(self.non_member, status.HTTP_403_FORBIDDEN)
def test_denies_unauthenticated_user_to_create_comment(self):
self.assert_create(expected=status.HTTP_403_FORBIDDEN)
class TestCommentListProjectAPI(CRUDMixin):
def setUp(self):
self.project = prepare_project()
self.non_member = make_user()
self.doc = make_doc(self.project.item)
make_comment(self.doc, self.project.users[0])
self.url = reverse(viewname='comment_list_project', args=[self.project.item.id])
def test_allows_project_member_to_list_comments(self):
for member in self.project.users:
response = self.assert_fetch(member, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 1)
def test_denies_non_project_member_to_list_comments(self):
self.assert_fetch(self.non_member, status.HTTP_403_FORBIDDEN)
def test_denies_unauthenticated_user_to_list_comments(self):
self.assert_fetch(expected=status.HTTP_403_FORBIDDEN)
def assert_bulk_delete(self, user=None, expected=status.HTTP_403_FORBIDDEN):
ids = [item.id for item in self.doc.comments.all()]
if user:
self.client.force_login(user)
response = self.client.delete(self.url, data={'ids': ids}, format='json')
self.assertEqual(response.status_code, expected)
def test_allows_project_member_to_delete_comments(self):
# Todo: Disallow non admin to delete comments.
for member in self.project.users:
self.assert_bulk_delete(member, status.HTTP_204_NO_CONTENT)
response = self.client.get(self.url)
self.assertEqual(response.data['count'], 0)
    def test_denies_non_project_member_to_delete_comments(self):
        self.assert_bulk_delete(self.non_member, status.HTTP_403_FORBIDDEN)
    def test_denies_unauthenticated_user_to_delete_comments(self):
        self.assert_bulk_delete(expected=status.HTTP_403_FORBIDDEN)
class TestCommentDetailAPI(CRUDMixin):
def setUp(self):
self.project = prepare_project()
self.non_member = make_user()
doc = make_doc(self.project.item)
comment = make_comment(doc, self.project.users[0])
self.data = {'text': 'example'}
self.url = reverse(viewname='comment_detail', args=[self.project.item.id, doc.id, comment.id])
def test_allows_comment_owner_to_get_comment(self):
# Todo: Allows project member to get comment.
self.assert_fetch(self.project.users[0], status.HTTP_200_OK)
def test_disallows_non_comment_owner_to_get_comment(self):
for member in self.project.users[1:]:
self.assert_fetch(member, status.HTTP_403_FORBIDDEN)
def test_disallows_non_project_member_to_get_comment(self):
self.assert_fetch(self.non_member, status.HTTP_403_FORBIDDEN)
def test_disallows_unauthenticated_user_to_get_comment(self):
self.assert_fetch(expected=status.HTTP_403_FORBIDDEN)
def test_allows_comment_owner_to_update_comment(self):
response = self.assert_update(self.project.users[0], status.HTTP_200_OK)
self.assertEqual(response.data['text'], self.data['text'])
def test_disallows_non_comment_owner_to_update_comment(self):
for member in self.project.users[1:]:
self.assert_update(member, status.HTTP_403_FORBIDDEN)
def test_disallows_non_project_member_to_update_comment(self):
self.assert_update(self.non_member, status.HTTP_403_FORBIDDEN)
def test_disallows_unauthenticated_user_to_update_comment(self):
self.assert_update(expected=status.HTTP_403_FORBIDDEN)
def test_allows_comment_owner_to_delete_comment(self):
self.assert_delete(self.project.users[0], status.HTTP_204_NO_CONTENT)
def test_disallows_non_comment_owner_to_delete_comment(self):
for member in self.project.users[1:]:
self.assert_delete(member, status.HTTP_403_FORBIDDEN)
def test_disallows_non_project_member_to_delete_comment(self):
self.assert_delete(self.non_member, status.HTTP_403_FORBIDDEN)
def test_disallows_unauthenticated_user_to_delete_comment(self):
self.assert_delete(expected=status.HTTP_403_FORBIDDEN)
```
#### File: api/views/annotation.py
```python
from django.shortcuts import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from members.permissions import IsAnnotationApprover, IsProjectAdmin
from ..models import Example
from ..serializers import ApproverSerializer
class ApprovalAPI(APIView):
permission_classes = [IsAuthenticated & (IsAnnotationApprover | IsProjectAdmin)]
def post(self, request, *args, **kwargs):
approved = self.request.data.get('approved', True)
example = get_object_or_404(Example, pk=self.kwargs['example_id'])
example.annotations_approved_by = self.request.user if approved else None
example.save()
return Response(ApproverSerializer(example).data)
```
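A hedged client-side sketch of how this endpoint might be exercised; the route, host, and token below are assumptions, since the URL configuration is not part of this excerpt:
```python
import requests

resp = requests.post(
    "http://localhost:8000/v1/projects/1/examples/42/approval",  # assumed route
    json={"approved": True},  # send False to clear the approval; omitted defaults to True
    headers={"Authorization": "Token <your-token>"},  # assumed auth scheme
)
print(resp.json())  # the serialized example, including who approved it
```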
#### File: views/tasks/base.py
```python
from functools import partial
from django.core.exceptions import ValidationError
from django.shortcuts import get_object_or_404
from rest_framework import generics, status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from members.permissions import IsInProjectOrAdmin
from ...models import Project
from ...permissions import CanEditAnnotation
class BaseListAPI(generics.ListCreateAPIView):
annotation_class = None
pagination_class = None
permission_classes = [IsAuthenticated & IsInProjectOrAdmin]
swagger_schema = None
@property
def project(self):
return get_object_or_404(Project, pk=self.kwargs['project_id'])
def get_queryset(self):
queryset = self.annotation_class.objects.filter(example=self.kwargs['example_id'])
if not self.project.collaborative_annotation:
queryset = queryset.filter(user=self.request.user)
return queryset
def create(self, request, *args, **kwargs):
request.data['example'] = self.kwargs['example_id']
try:
            response = super().create(request, *args, **kwargs)
except ValidationError as err:
response = Response({'detail': err.messages}, status=status.HTTP_400_BAD_REQUEST)
return response
def perform_create(self, serializer):
serializer.save(example_id=self.kwargs['example_id'], user=self.request.user)
def delete(self, request, *args, **kwargs):
queryset = self.get_queryset()
queryset.all().delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class BaseDetailAPI(generics.RetrieveUpdateDestroyAPIView):
lookup_url_kwarg = 'annotation_id'
swagger_schema = None
@property
def project(self):
return get_object_or_404(Project, pk=self.kwargs['project_id'])
def get_permissions(self):
if self.project.collaborative_annotation:
self.permission_classes = [IsAuthenticated & IsInProjectOrAdmin]
else:
self.permission_classes = [
IsAuthenticated & IsInProjectOrAdmin & partial(CanEditAnnotation, self.queryset)
]
return super().get_permissions()
```
#### File: backend/members/signals.py
```python
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import m2m_changed, post_save, pre_delete
from django.dispatch import receiver
from api.models import Project
from roles.models import Role
from .models import Member
@receiver(post_save, sender=Member)
def add_linked_project(sender, instance, created, **kwargs):
if not created:
return
userInstance = instance.user
projectInstance = instance.project
if userInstance and projectInstance:
user = User.objects.get(pk=userInstance.pk)
project = Project.objects.get(pk=projectInstance.pk)
user.projects.add(project)
user.save()
@receiver(m2m_changed, sender=Project.users.through)
def remove_mapping_on_remove_user_from_project(sender, instance, action, reverse, **kwargs):
# if reverse is True, pk_set is project_ids and instance is user.
# else, pk_set is user_ids and instance is project.
user_ids = kwargs['pk_set']
if action.startswith('post_remove') and not reverse:
Member.objects.filter(user__in=user_ids, project=instance).delete()
elif action.startswith('post_add') and not reverse:
admin_role = Role.objects.get(name=settings.ROLE_PROJECT_ADMIN)
Member.objects.bulk_create(
[Member(role=admin_role, project=instance, user_id=user)
for user in user_ids
if not Member.objects.filter(project=instance, user_id=user).exists()]
)
@receiver(pre_delete, sender=Member)
def delete_linked_project(sender, instance, using, **kwargs):
userInstance = instance.user
projectInstance = instance.project
if userInstance and projectInstance:
user = User.objects.get(pk=userInstance.pk)
project = Project.objects.get(pk=projectInstance.pk)
user.projects.remove(project)
user.save()
``` |
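A sketch of the behaviour these receivers produce, as it would look in a Django shell for this project (assumes the app is set up; the username is invented):
```python
from django.contrib.auth.models import User
from api.models import Project
from members.models import Member

user = User.objects.create_user(username="alice")
project = Project.objects.first()

project.users.add(user)     # fires m2m post_add -> a Member row with the admin role
assert Member.objects.filter(project=project, user=user).exists()

project.users.remove(user)  # fires m2m post_remove -> the Member row is deleted
assert not Member.objects.filter(project=project, user=user).exists()
```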
{
"source": "12yuens2/jMetalPy",
"score": 3
} |
#### File: jmetal/core/observer.py
```python
from abc import abstractmethod, ABC
"""
.. module:: Observable
:platform: Unix, Windows
:synopsis: Implementation of the observer-observable pattern.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
class Observer(ABC):
@abstractmethod
def update(self, *args, **kwargs):
""" Update method.
"""
pass
class Observable(ABC):
@abstractmethod
def register(self, observer):
pass
@abstractmethod
def deregister(self, observer):
pass
@abstractmethod
def deregister_all(self):
pass
@abstractmethod
def notify_all(self, *args, **kwargs):
pass
```
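A minimal concrete pair, sketched here to show how the abstract interfaces above are meant to be used; the class names below are illustrative, not from jMetalPy:
```python
from jmetal.core.observer import Observer, Observable

class PrintObserver(Observer):
    def update(self, *args, **kwargs):
        print("observed:", kwargs)

class SimpleObservable(Observable):
    def __init__(self):
        self.observers = []
    def register(self, observer):
        self.observers.append(observer)
    def deregister(self, observer):
        self.observers.remove(observer)
    def deregister_all(self):
        self.observers.clear()
    def notify_all(self, *args, **kwargs):
        for observer in self.observers:
            observer.update(*args, **kwargs)

observable = SimpleObservable()
observable.register(PrintObserver())
observable.notify_all(evaluations=100)  # -> observed: {'evaluations': 100}
```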
#### File: core/test/test_quality_indicator.py
```python
import unittest
from os.path import dirname, join
from pathlib import Path
import numpy as np
from jmetal.core.quality_indicator import GenerationalDistance, InvertedGenerationalDistance, EpsilonIndicator, \
HyperVolume
class GenerationalDistanceTestCases(unittest.TestCase):
""" Class including unit tests for class GenerationalDistance
"""
def test_should_constructor_create_a_non_null_object(self) -> None:
indicator = GenerationalDistance([])
self.assertIsNotNone(indicator)
def test_get_name_return_the_right_value(self):
self.assertEqual("Generational Distance", GenerationalDistance([]).get_name())
def test_get_short_name_return_the_right_value(self):
self.assertEqual("GD", GenerationalDistance([]).get_short_name())
def test_case1(self):
"""
Case 1. Reference front: [[1.0, 1.0]], front: [[1.0, 1.0]]
Expected result: the distance to the nearest point of the reference front is 0.0
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0]]))
front = np.array([[1.0, 1.0]])
result = indicator.compute(front)
self.assertEqual(0.0, result)
def test_case2(self):
"""
Case 2. Reference front: [[1.0, 1.0], [2.0, 2.0], front: [[1.0, 1.0]]
Expected result: the distance to the nearest point of the reference front is 0.0
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0], [2.0, 2.0]]))
front = np.array([[1.0, 1.0]])
result = indicator.compute(front)
self.assertEqual(0.0, result)
def test_case3(self):
"""
Case 3. Reference front: [[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]], front: [[1.0, 1.0, 1.0]]
Expected result: the distance to the nearest point of the reference front is 0.0. Example with three objectives
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]]))
front = np.array([[1.0, 1.0, 1.0]])
result = indicator.compute(front)
self.assertEqual(0.0, result)
def test_case4(self):
"""
Case 4. reference front: [[1.0, 1.0], [2.0, 2.0]], front: [[1.5, 1.5]]
Expected result: the distance to the nearest point of the reference front is the euclidean distance to any of the
points of the reference front
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0], [2.0, 2.0]]))
front = np.array([[1.5, 1.5]])
result = indicator.compute(front)
self.assertEqual(np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2)), result)
self.assertEqual(np.sqrt(pow(2.0 - 1.5, 2) + pow(2.0 - 1.5, 2)), result)
def test_case5(self):
"""
Case 5. reference front: [[1.0, 1.0], [2.1, 2.1]], front: [[1.5, 1.5]]
Expected result: the distance to the nearest point of the reference front is the euclidean distance
to the nearest point of the reference front ([1.0, 1.0])
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0], [2.1, 2.1]]))
front = np.array([[1.5, 1.5]])
result = indicator.compute(front)
self.assertEqual(np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2)), result)
self.assertEqual(np.sqrt(pow(2.0 - 1.5, 2) + pow(2.0 - 1.5, 2)), result)
def test_case6(self):
"""
Case 6. reference front: [[1.0, 1.0], [2.1, 2.1]], front: [[1.5, 1.5], [2.2, 2.2]]
        Expected result: the average of the distances from each point of the front to its nearest point in the reference front
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0], [2.1, 2.1]]))
front = np.array([[1.5, 1.5], [2.2, 2.2]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2))
distance_of_second_point = np.sqrt(pow(2.1 - 2.2, 2) + pow(2.1 - 2.2, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point) / 2.0, result)
def test_case7(self):
"""
Case 7. reference front: [[1.0, 1.0], [2.1, 2.1]], front: [[1.5, 1.5], [2.2, 2.2], [1.9, 1.9]]
        Expected result: the average of the distances from each point of the front to its nearest point in the reference front
:return:
"""
indicator = GenerationalDistance(np.array([[1.0, 1.0], [2.1, 2.1]]))
front = np.array([[1.5, 1.5], [2.2, 2.2], [1.9, 1.9]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2))
distance_of_second_point = np.sqrt(pow(2.1 - 2.2, 2) + pow(2.1 - 2.2, 2))
distance_of_third_point = np.sqrt(pow(2.1 - 1.9, 2) + pow(2.1 - 1.9, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point + distance_of_third_point) / 3.0, result)
class InvertedGenerationalDistanceTestCases(unittest.TestCase):
""" Class including unit tests for class InvertedGenerationalDistance
"""
def test_should_constructor_create_a_non_null_object(self) -> None:
indicator = InvertedGenerationalDistance([])
self.assertIsNotNone(indicator)
def test_get_name_return_the_right_value(self):
self.assertEqual("Inverted Generational Distance", InvertedGenerationalDistance([]).get_name())
def test_get_short_name_return_the_right_value(self):
self.assertEqual("IGD", InvertedGenerationalDistance([]).get_short_name())
def test_case1(self):
"""
Case 1. Reference front: [[1.0, 1.0]], front: [[1.0, 1.0]]
Expected result = 0.0
Comment: simplest case
:return:
"""
indicator = InvertedGenerationalDistance(np.array([[1.0, 1.0]]))
front = np.array([[1.0, 1.0]])
result = indicator.compute(front)
self.assertEqual(0.0, result)
def test_case2(self):
"""
Case 2. Reference front: [[1.0, 1.0], [2.0, 2.0], front: [[1.0, 1.0]]
        Expected result: the average of the distances from each point of the reference front to its nearest point in the front
:return:
"""
indicator = InvertedGenerationalDistance(np.array([[1.0, 1.0], [2.0, 2.0]]))
front = np.array([[1.0, 1.0]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.0, 2) + pow(1.0 - 1.0, 2))
distance_of_second_point = np.sqrt(pow(2.0 - 1.0, 2) + pow(2.0 - 1.0, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point) / 2.0, result)
def test_case3(self):
"""
Case 3. Reference front: [[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]], front: [[1.0, 1.0, 1.0]]
        Expected result: the average of the distances from each point of the reference front to its nearest point in the front.
Example with three objectives
:return:
"""
indicator = InvertedGenerationalDistance(np.array([[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]]))
front = np.array([[1.0, 1.0, 1.0]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.0, 2) + pow(1.0 - 1.0, 2) + pow(1.0 - 1.0, 2))
distance_of_second_point = np.sqrt(pow(2.0 - 1.0, 2) + pow(2.0 - 1.0, 2) + pow(2.0 - 1.0, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point) / 2.0, result)
def test_case4(self):
"""
Case 4. reference front: [[1.0, 1.0], [2.1, 2.1]], front: [[1.5, 1.5], [2.2, 2.2]]
        Expected result: the average of the distances from each point of the reference front to its nearest point in the front.
:return:
"""
indicator = InvertedGenerationalDistance(np.array([[1.0, 1.0], [2.1, 2.1]]))
front = np.array([[1.5, 1.5], [2.2, 2.2]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2))
distance_of_second_point = np.sqrt(pow(2.1 - 2.2, 2) + pow(2.1 - 2.2, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point) / 2.0, result)
def test_case5(self):
"""
        Case 5. reference front: [[1.0, 1.0], [2.0, 2.0]], front: [[1.5, 1.5], [2.2, 2.2], [1.9, 1.9]]
        Expected result: the average of the distances from each point of the reference front to its nearest point in the front.
:return:
"""
indicator = InvertedGenerationalDistance(np.array([[1.0, 1.0], [2.0, 2.0]]))
front = np.array([[1.5, 1.5], [2.2, 2.2], [1.9, 1.9]])
result = indicator.compute(front)
distance_of_first_point = np.sqrt(pow(1.0 - 1.5, 2) + pow(1.0 - 1.5, 2))
distance_of_second_point = np.sqrt(pow(2.0 - 1.9, 2) + pow(2.0 - 1.9, 2))
self.assertEqual((distance_of_first_point + distance_of_second_point) / 2.0, result)
class EpsilonIndicatorTestCases(unittest.TestCase):
""" Class including unit tests for class EpsilonIndicator
"""
def test_should_constructor_create_a_non_null_object(self) -> None:
indicator = EpsilonIndicator(np.array([[1.0, 1.0], [2.0, 2.0]]))
self.assertIsNotNone(indicator)
class HyperVolumeTestCases(unittest.TestCase):
def setUp(self):
self.file_path = dirname(join(dirname(__file__)))
def test_should_hypervolume_return_5_0(self):
reference_point = [2, 2, 2]
front = np.array([[1, 0, 1], [0, 1, 0]])
hv = HyperVolume(reference_point)
value = hv.compute(front)
self.assertEqual(5.0, value)
def test_should_hypervolume_return_the_correct_value_when_applied_to_the_ZDT1_reference_front(self):
filename = 'jmetal/core/test/ZDT1.pf'
front = []
if Path(filename).is_file():
with open(filename) as file:
for line in file:
vector = [float(x) for x in line.split()]
front.append(vector)
        else:
            self.fail('reference front file not found: ' + filename)
reference_point = [1, 1]
hv = HyperVolume(reference_point)
value = hv.compute(np.array(front))
self.assertAlmostEqual(0.666, value, delta=0.001)
if __name__ == '__main__':
unittest.main()
```
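For reference, the arithmetic these GD tests assert can be written directly in a few lines of NumPy (a sketch, not the library implementation): GD here is the mean Euclidean distance from each front point to its nearest reference point.
```python
import numpy as np

def generational_distance(front, reference_front):
    # Pairwise Euclidean distances, then nearest reference point per front point.
    dists = np.sqrt(((front[:, None, :] - reference_front[None, :, :]) ** 2).sum(axis=2))
    return dists.min(axis=1).mean()

front = np.array([[1.5, 1.5], [2.2, 2.2]])
reference = np.array([[1.0, 1.0], [2.1, 2.1]])
print(generational_distance(front, reference))  # ~0.4243, the value test_case6 asserts
```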
#### File: jmetal/operator/crossover.py
```python
import copy
import random
from typing import List
from jmetal.core.operator import Crossover
from jmetal.core.solution import Solution, FloatSolution, BinarySolution, PermutationSolution, IntegerSolution, \
CompositeSolution
from jmetal.util.ckecking import Check
"""
.. module:: crossover
:platform: Unix, Windows
:synopsis: Module implementing crossover operators.
.. moduleauthor:: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>
"""
class NullCrossover(Crossover[Solution, Solution]):
def __init__(self):
super(NullCrossover, self).__init__(probability=0.0)
def execute(self, parents: List[Solution]) -> List[Solution]:
if len(parents) != 2:
raise Exception('The number of parents is not two: {}'.format(len(parents)))
return parents
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self):
return 'Null crossover'
class PMXCrossover(Crossover[PermutationSolution, PermutationSolution]):
def __init__(self, probability: float):
super(PMXCrossover, self).__init__(probability=probability)
def execute(self, parents: List[PermutationSolution]) -> List[PermutationSolution]:
if len(parents) != 2:
raise Exception('The number of parents is not two: {}'.format(len(parents)))
offspring = [copy.deepcopy(parents[0]), copy.deepcopy(parents[1])]
permutation_length = offspring[0].number_of_variables
rand = random.random()
if rand <= self.probability:
cross_points = sorted([random.randint(0, permutation_length) for _ in range(2)])
def _repeated(element, collection):
c = 0
for e in collection:
if e == element:
c += 1
return c > 1
def _swap(data_a, data_b, cross_points):
c1, c2 = cross_points
new_a = data_a[:c1] + data_b[c1:c2] + data_a[c2:]
new_b = data_b[:c1] + data_a[c1:c2] + data_b[c2:]
return new_a, new_b
def _map(swapped, cross_points):
n = len(swapped[0])
c1, c2 = cross_points
s1, s2 = swapped
map_ = s1[c1:c2], s2[c1:c2]
for i_chromosome in range(n):
if not c1 < i_chromosome < c2:
for i_son in range(2):
while _repeated(swapped[i_son][i_chromosome], swapped[i_son]):
map_index = map_[i_son].index(swapped[i_son][i_chromosome])
swapped[i_son][i_chromosome] = map_[1 - i_son][map_index]
return s1, s2
swapped = _swap(parents[0].variables, parents[1].variables, cross_points)
mapped = _map(swapped, cross_points)
offspring[0].variables, offspring[1].variables = mapped
return offspring
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self):
return 'Partially Matched crossover'
class CXCrossover(Crossover[PermutationSolution, PermutationSolution]):
def __init__(self, probability: float):
super(CXCrossover, self).__init__(probability=probability)
def execute(self, parents: List[PermutationSolution]) -> List[PermutationSolution]:
if len(parents) != 2:
raise Exception('The number of parents is not two: {}'.format(len(parents)))
offspring = [copy.deepcopy(parents[1]), copy.deepcopy(parents[0])]
rand = random.random()
if rand <= self.probability:
for i in range(parents[0].number_of_variables):
idx = random.randint(0, len(parents[0].variables[i]) - 1)
curr_idx = idx
cycle = []
while True:
cycle.append(curr_idx)
curr_idx = parents[0].variables[i].index(parents[1].variables[i][curr_idx])
if curr_idx == idx:
break
for j in range(len(parents[0].variables[i])):
if j in cycle:
offspring[0].variables[i][j] = parents[0].variables[i][j]
                        offspring[1].variables[i][j] = parents[1].variables[i][j]
return offspring
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self):
return 'Cycle crossover'
class SBXCrossover(Crossover[FloatSolution, FloatSolution]):
__EPS = 1.0e-14
def __init__(self, probability: float, distribution_index: float = 20.0):
super(SBXCrossover, self).__init__(probability=probability)
self.distribution_index = distribution_index
if distribution_index < 0:
raise Exception("The distribution index is negative: " + str(distribution_index))
def execute(self, parents: List[FloatSolution]) -> List[FloatSolution]:
Check.that(type(parents[0]) is FloatSolution, "Solution type invalid: " + str(type(parents[0])))
Check.that(type(parents[1]) is FloatSolution, "Solution type invalid")
Check.that(len(parents) == 2, 'The number of parents is not two: {}'.format(len(parents)))
offspring = [copy.deepcopy(parents[0]), copy.deepcopy(parents[1])]
rand = random.random()
if rand <= self.probability:
for i in range(parents[0].number_of_variables):
value_x1, value_x2 = parents[0].variables[i], parents[1].variables[i]
if random.random() <= 0.5:
if abs(value_x1 - value_x2) > self.__EPS:
if value_x1 < value_x2:
y1, y2 = value_x1, value_x2
else:
y1, y2 = value_x2, value_x1
lower_bound, upper_bound = parents[0].lower_bound[i], parents[1].upper_bound[i]
beta = 1.0 + (2.0 * (y1 - lower_bound) / (y2 - y1))
alpha = 2.0 - pow(beta, -(self.distribution_index + 1.0))
rand = random.random()
if rand <= (1.0 / alpha):
betaq = pow(rand * alpha, (1.0 / (self.distribution_index + 1.0)))
else:
betaq = pow(1.0 / (2.0 - rand * alpha), 1.0 / (self.distribution_index + 1.0))
c1 = 0.5 * (y1 + y2 - betaq * (y2 - y1))
beta = 1.0 + (2.0 * (upper_bound - y2) / (y2 - y1))
alpha = 2.0 - pow(beta, -(self.distribution_index + 1.0))
if rand <= (1.0 / alpha):
betaq = pow((rand * alpha), (1.0 / (self.distribution_index + 1.0)))
else:
betaq = pow(1.0 / (2.0 - rand * alpha), 1.0 / (self.distribution_index + 1.0))
c2 = 0.5 * (y1 + y2 + betaq * (y2 - y1))
if c1 < lower_bound:
c1 = lower_bound
if c2 < lower_bound:
c2 = lower_bound
if c1 > upper_bound:
c1 = upper_bound
if c2 > upper_bound:
c2 = upper_bound
if random.random() <= 0.5:
offspring[0].variables[i] = c2
offspring[1].variables[i] = c1
else:
offspring[0].variables[i] = c1
offspring[1].variables[i] = c2
else:
offspring[0].variables[i] = value_x1
offspring[1].variables[i] = value_x2
else:
offspring[0].variables[i] = value_x1
offspring[1].variables[i] = value_x2
return offspring
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self) -> str:
return 'SBX crossover'
class IntegerSBXCrossover(Crossover[IntegerSolution, IntegerSolution]):
__EPS = 1.0e-14
def __init__(self, probability: float, distribution_index: float = 20.0):
super(IntegerSBXCrossover, self).__init__(probability=probability)
self.distribution_index = distribution_index
def execute(self, parents: List[IntegerSolution]) -> List[IntegerSolution]:
Check.that(type(parents[0]) is IntegerSolution, "Solution type invalid")
Check.that(type(parents[1]) is IntegerSolution, "Solution type invalid")
Check.that(len(parents) == 2, 'The number of parents is not two: {}'.format(len(parents)))
offspring = [copy.deepcopy(parents[0]), copy.deepcopy(parents[1])]
rand = random.random()
if rand <= self.probability:
for i in range(parents[0].number_of_variables):
value_x1, value_x2 = parents[0].variables[i], parents[1].variables[i]
if random.random() <= 0.5:
if abs(value_x1 - value_x2) > self.__EPS:
if value_x1 < value_x2:
y1, y2 = value_x1, value_x2
else:
y1, y2 = value_x2, value_x1
lower_bound, upper_bound = parents[0].lower_bound[i], parents[1].upper_bound[i]
beta = 1.0 + (2.0 * (y1 - lower_bound) / (y2 - y1))
alpha = 2.0 - pow(beta, -(self.distribution_index + 1.0))
rand = random.random()
if rand <= (1.0 / alpha):
betaq = pow(rand * alpha, (1.0 / (self.distribution_index + 1.0)))
else:
betaq = pow(1.0 / (2.0 - rand * alpha), 1.0 / (self.distribution_index + 1.0))
c1 = 0.5 * (y1 + y2 - betaq * (y2 - y1))
beta = 1.0 + (2.0 * (upper_bound - y2) / (y2 - y1))
alpha = 2.0 - pow(beta, -(self.distribution_index + 1.0))
if rand <= (1.0 / alpha):
betaq = pow((rand * alpha), (1.0 / (self.distribution_index + 1.0)))
else:
betaq = pow(1.0 / (2.0 - rand * alpha), 1.0 / (self.distribution_index + 1.0))
c2 = 0.5 * (y1 + y2 + betaq * (y2 - y1))
if c1 < lower_bound:
c1 = lower_bound
if c2 < lower_bound:
c2 = lower_bound
if c1 > upper_bound:
c1 = upper_bound
if c2 > upper_bound:
c2 = upper_bound
if random.random() <= 0.5:
offspring[0].variables[i] = int(c2)
offspring[1].variables[i] = int(c1)
else:
offspring[0].variables[i] = int(c1)
offspring[1].variables[i] = int(c2)
else:
offspring[0].variables[i] = value_x1
offspring[1].variables[i] = value_x2
else:
offspring[0].variables[i] = value_x1
offspring[1].variables[i] = value_x2
return offspring
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self) -> str:
return 'Integer SBX crossover'
class SPXCrossover(Crossover[BinarySolution, BinarySolution]):
def __init__(self, probability: float):
super(SPXCrossover, self).__init__(probability=probability)
def execute(self, parents: List[BinarySolution]) -> List[BinarySolution]:
Check.that(type(parents[0]) is BinarySolution, "Solution type invalid")
Check.that(type(parents[1]) is BinarySolution, "Solution type invalid")
Check.that(len(parents) == 2, 'The number of parents is not two: {}'.format(len(parents)))
offspring = [copy.deepcopy(parents[0]), copy.deepcopy(parents[1])]
rand = random.random()
if rand <= self.probability:
# 1. Get the total number of bits
total_number_of_bits = parents[0].get_total_number_of_bits()
# 2. Calculate the point to make the crossover
crossover_point = random.randrange(0, total_number_of_bits)
# 3. Compute the variable containing the crossover bit
variable_to_cut = 0
bits_count = len(parents[1].variables[variable_to_cut])
while bits_count < (crossover_point + 1):
variable_to_cut += 1
bits_count += len(parents[1].variables[variable_to_cut])
# 4. Compute the bit into the selected variable
diff = bits_count - crossover_point
crossover_point_in_variable = len(parents[1].variables[variable_to_cut]) - diff
# 5. Apply the crossover to the variable
bitset1 = copy.copy(parents[0].variables[variable_to_cut])
bitset2 = copy.copy(parents[1].variables[variable_to_cut])
for i in range(crossover_point_in_variable, len(bitset1)):
swap = bitset1[i]
bitset1[i] = bitset2[i]
bitset2[i] = swap
offspring[0].variables[variable_to_cut] = bitset1
offspring[1].variables[variable_to_cut] = bitset2
# 6. Apply the crossover to the other variables
for i in range(variable_to_cut + 1, parents[0].number_of_variables):
offspring[0].variables[i] = copy.deepcopy(parents[1].variables[i])
offspring[1].variables[i] = copy.deepcopy(parents[0].variables[i])
return offspring
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self) -> str:
return 'Single point crossover'
class DifferentialEvolutionCrossover(Crossover[FloatSolution, FloatSolution]):
""" This operator receives two parameters: the current individual and an array of three parent individuals. The
best and rand variants depends on the third parent, according whether it represents the current of the "best"
individual or a random_search one. The implementation of both variants are the same, due to that the parent selection is
external to the crossover operator.
"""
def __init__(self, CR: float, F: float, K: float):
super(DifferentialEvolutionCrossover, self).__init__(probability=1.0)
self.CR = CR
self.F = F
self.K = K
self.current_individual: FloatSolution = None
def execute(self, parents: List[FloatSolution]) -> List[FloatSolution]:
""" Execute the differential evolution crossover ('best/1/bin' variant in jMetal).
"""
if len(parents) != self.get_number_of_parents():
raise Exception('The number of parents is not {}: {}'.format(self.get_number_of_parents(), len(parents)))
child = copy.deepcopy(self.current_individual)
number_of_variables = parents[0].number_of_variables
rand = random.randint(0, number_of_variables - 1)
for i in range(number_of_variables):
if random.random() < self.CR or i == rand:
value = parents[2].variables[i] + self.F * (parents[0].variables[i] - parents[1].variables[i])
if value < child.lower_bound[i]:
value = child.lower_bound[i]
if value > child.upper_bound[i]:
value = child.upper_bound[i]
else:
value = child.variables[i]
child.variables[i] = value
return [child]
def get_number_of_parents(self) -> int:
return 3
def get_number_of_children(self) -> int:
return 1
def get_name(self) -> str:
return 'Differential Evolution crossover'
class CompositeCrossover(Crossover[CompositeSolution, CompositeSolution]):
__EPS = 1.0e-14
    def __init__(self, crossover_operator_list: List[Crossover]):
super(CompositeCrossover, self).__init__(probability=1.0)
Check.is_not_none(crossover_operator_list)
Check.collection_is_not_empty(crossover_operator_list)
self.crossover_operators_list = []
for operator in crossover_operator_list:
Check.that(issubclass(operator.__class__, Crossover), "Object is not a subclass of Crossover")
self.crossover_operators_list.append(operator)
def execute(self, solutions: List[CompositeSolution]) -> List[CompositeSolution]:
Check.is_not_none(solutions)
Check.that(len(solutions) == 2, "The number of parents is not two: " + str(len(solutions)))
offspring1 = []
offspring2 = []
number_of_solutions_in_composite_solution = solutions[0].number_of_variables
for i in range(number_of_solutions_in_composite_solution):
parents = [solutions[0].variables[i], solutions[1].variables[i]]
children = self.crossover_operators_list[i].execute(parents)
offspring1.append(children[0])
offspring2.append(children[1])
return [CompositeSolution(offspring1), CompositeSolution(offspring2)]
def get_number_of_parents(self) -> int:
return 2
def get_number_of_children(self) -> int:
return 2
def get_name(self) -> str:
return 'Composite crossover'
```
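A hedged usage sketch for `SBXCrossover`, assuming jMetalPy's `FloatSolution` constructor (bound lists plus a `variables` list) as used throughout the library; the bounds and parent values are invented:
```python
import random
from jmetal.core.solution import FloatSolution
from jmetal.operator.crossover import SBXCrossover

random.seed(42)  # the operator is stochastic; seeding makes the sketch repeatable
parent1 = FloatSolution(lower_bound=[0.0], upper_bound=[1.0], number_of_objectives=2)
parent2 = FloatSolution(lower_bound=[0.0], upper_bound=[1.0], number_of_objectives=2)
parent1.variables, parent2.variables = [0.2], [0.8]

offspring = SBXCrossover(probability=1.0, distribution_index=20.0).execute([parent1, parent2])
print([child.variables for child in offspring])  # two children, each clipped to [0.0, 1.0]
```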
#### File: multiobjective/test/test_unconstrained.py
```python
import unittest
from jmetal.problem.multiobjective.unconstrained import Kursawe, Fonseca, Schaffer, Viennet2
class KursaweTestCases(unittest.TestCase):
def test_should_constructor_create_a_non_null_object(self) -> None:
problem = Kursawe(3)
self.assertIsNotNone(problem)
def test_should_constructor_create_a_valid_problem_with_default_settings(self) -> None:
problem = Kursawe()
self.assertEqual(3, problem.number_of_variables)
self.assertEqual(2, problem.number_of_objectives)
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-5.0, -5.0, -5.0], problem.lower_bound)
self.assertEqual([5.0, 5.0, 5.0], problem.upper_bound)
def test_should_constructor_create_a_valid_problem_with_5_variables(self) -> None:
problem = Kursawe(5)
self.assertEqual(5, problem.number_of_variables)
self.assertEqual(2, problem.number_of_objectives)
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-5.0, -5.0, -5.0, -5.0, -5.0], problem.lower_bound)
self.assertEqual([5.0, 5.0, 5.0, 5.0, 5.0], problem.upper_bound)
def test_should_create_solution_create_a_valid_float_solution(self) -> None:
problem = Kursawe(3)
solution = problem.create_solution()
self.assertEqual(3, solution.number_of_variables)
self.assertEqual(3, len(solution.variables))
self.assertEqual(2, solution.number_of_objectives)
self.assertEqual(2, len(solution.objectives))
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-5.0, -5.0, -5.0], problem.lower_bound)
self.assertEqual([5.0, 5.0, 5.0], problem.upper_bound)
self.assertTrue(all(variable >= -5.0 for variable in solution.variables))
self.assertTrue(all(variable <= 5.0 for variable in solution.variables))
def test_should_get_name_return_the_right_name(self):
problem = Kursawe()
self.assertEqual("Kursawe", problem.get_name())
class FonsecaTestCases(unittest.TestCase):
def test_should_constructor_create_a_non_null_object(self):
problem = Fonseca()
self.assertIsNotNone(problem)
def test_should_constructor_create_a_valid_problem_with_default_settings(self):
problem = Fonseca()
self.assertEqual(3, problem.number_of_variables)
self.assertEqual(2, problem.number_of_objectives)
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual(3 * [-4], problem.lower_bound)
self.assertEqual(3 * [4], problem.upper_bound)
def test_should_create_solution_create_a_valid_float_solution(self):
problem = Fonseca()
solution = problem.create_solution()
self.assertEqual(3, solution.number_of_variables)
self.assertEqual(3, len(solution.variables))
self.assertEqual(2, solution.number_of_objectives)
self.assertEqual(2, len(solution.objectives))
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual(3 * [-4], problem.lower_bound)
self.assertEqual(3 * [4], problem.upper_bound)
self.assertTrue(solution.variables[0] >= -4)
self.assertTrue(solution.variables[0] <= 4)
def test_should_create_solution_return_right_evaluation_values(self):
problem = Fonseca()
solution1 = problem.create_solution()
solution1.variables[0] = -1.3
solution1.variables[1] = 1.5
solution1.variables[2] = 1.21
problem.evaluate(solution1)
self.assertAlmostEqual(solution1.objectives[0], 0.991563628, 4)
self.assertAlmostEqual(solution1.objectives[1], 0.999663388, 4)
def test_should_get_name_return_the_right_name(self):
problem = Fonseca()
self.assertEqual("Fonseca", problem.get_name())
class SchafferTestCases(unittest.TestCase):
def test_should_constructor_create_a_non_null_object(self):
problem = Schaffer()
self.assertIsNotNone(problem)
def test_should_constructor_create_a_valid_problem_with_default_settings(self):
problem = Schaffer()
self.assertEqual(1, problem.number_of_variables)
self.assertEqual(2, problem.number_of_objectives)
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-100000], problem.lower_bound)
self.assertEqual([100000], problem.upper_bound)
def test_should_create_solution_create_a_valid_float_solution(self):
problem = Schaffer()
solution = problem.create_solution()
self.assertEqual(1, solution.number_of_variables)
self.assertEqual(1, len(solution.variables))
self.assertEqual(2, solution.number_of_objectives)
self.assertEqual(2, len(solution.objectives))
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-100000], problem.lower_bound)
self.assertEqual([100000], problem.upper_bound)
self.assertTrue(solution.variables[0] >= -100000)
self.assertTrue(solution.variables[0] <= 100000)
def test_should_create_solution_return_right_evaluation_values(self):
problem = Schaffer()
solution1 = problem.create_solution()
solution2 = problem.create_solution()
solution1.variables[0] = 3
solution2.variables[0] = -2.6
problem.evaluate(solution1)
problem.evaluate(solution2)
self.assertAlmostEqual(solution1.objectives[0], 9)
self.assertAlmostEqual(solution1.objectives[1], 1)
self.assertAlmostEqual(solution2.objectives[0], 6.76)
self.assertAlmostEqual(solution2.objectives[1], 21.16)
def test_should_get_name_return_the_right_name(self):
problem = Schaffer()
self.assertEqual("Schaffer", problem.get_name())
class Viennet2TestCases(unittest.TestCase):
def test_should_constructor_create_a_non_null_object(self):
problem = Viennet2()
self.assertIsNotNone(problem)
def test_should_constructor_create_a_valid_problem_with_default_settings(self):
problem = Viennet2()
self.assertEqual(2, problem.number_of_variables)
self.assertEqual(3, problem.number_of_objectives)
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-4, -4], problem.lower_bound)
self.assertEqual([4, 4], problem.upper_bound)
def test_should_create_solution_create_a_valid_float_solution(self):
problem = Viennet2()
solution = problem.create_solution()
self.assertEqual(2, solution.number_of_variables)
self.assertEqual(2, len(solution.variables))
self.assertEqual(3, solution.number_of_objectives)
self.assertEqual(3, len(solution.objectives))
self.assertEqual(0, problem.number_of_constraints)
self.assertEqual([-4, -4], problem.lower_bound)
self.assertEqual([4, 4], problem.upper_bound)
self.assertTrue(solution.variables[0] >= -4)
self.assertTrue(solution.variables[0] <= 4)
def test_should_create_solution_return_right_evaluation_values(self):
problem = Viennet2()
solution2 = problem.create_solution()
solution2.variables[0] = -2.6
solution2.variables[1] = 1.5
problem.evaluate(solution2)
self.assertAlmostEqual(solution2.objectives[0], 14.0607692307)
self.assertAlmostEqual(solution2.objectives[1], -11.8818055555)
self.assertAlmostEqual(solution2.objectives[2], -11.1532369747)
def test_should_get_name_return_the_right_name(self):
problem = Viennet2()
self.assertEqual("Viennet2", problem.get_name())
if __name__ == '__main__':
unittest.main()
```
#### File: problem/multiobjective/zdt.py
```python
from math import sqrt, pow, sin, pi, cos
from jmetal.core.problem import FloatProblem
from jmetal.core.solution import FloatSolution
"""
.. module:: ZDT
:platform: Unix, Windows
:synopsis: ZDT problem family of multi-objective problems.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
class ZDT1(FloatProblem):
""" Problem ZDT1.
.. note:: Bi-objective unconstrained problem. The default number of variables is 30.
.. note:: Continuous problem having a convex Pareto front
"""
def __init__(self, number_of_variables: int=30):
""" :param number_of_variables: Number of decision variables of the problem.
"""
super(ZDT1, self).__init__()
self.number_of_variables = number_of_variables
self.number_of_objectives = 2
self.number_of_constraints = 0
self.obj_directions = [self.MINIMIZE, self.MINIMIZE]
self.obj_labels = ['x', 'y']
self.lower_bound = self.number_of_variables * [0.0]
self.upper_bound = self.number_of_variables * [1.0]
def evaluate(self, solution: FloatSolution) -> FloatSolution:
g = self.eval_g(solution)
h = self.eval_h(solution.variables[0], g)
solution.objectives[0] = solution.variables[0]
solution.objectives[1] = h * g
return solution
def eval_g(self, solution: FloatSolution):
g = sum(solution.variables) - solution.variables[0]
constant = 9.0 / (solution.number_of_variables - 1)
return constant * g + 1.0
def eval_h(self, f: float, g: float) -> float:
return 1.0 - sqrt(f / g)
def get_name(self):
return 'ZDT1'
class ZDT1Modified(ZDT1):
""" Problem ZDT1Modified.
.. note:: Version including a loop for increasing the computing time of the evaluation functions.
"""
def __init__(self, number_of_variables = 30):
super(ZDT1Modified, self).__init__(number_of_variables)
def evaluate(self, solution:FloatSolution) -> FloatSolution:
s: float = 0.0
for i in range(1000):
for j in range(10000):
s += i * 0.235 / 1.234 + 1.23525 * j
return super().evaluate(solution)
class ZDT2(ZDT1):
""" Problem ZDT2.
.. note:: Bi-objective unconstrained problem. The default number of variables is 30.
.. note:: Continuous problem having a non-convex Pareto front
"""
def eval_h(self, f: float, g: float) -> float:
return 1.0 - pow(f / g, 2.0)
def get_name(self):
return 'ZDT2'
class ZDT3(ZDT1):
""" Problem ZDT3.
.. note:: Bi-objective unconstrained problem. The default number of variables is 30.
.. note:: Continuous problem having a partitioned Pareto front
"""
def eval_h(self, f: float, g: float) -> float:
return 1.0 - sqrt(f / g) - (f / g) * sin(10.0 * f * pi)
def get_name(self):
return 'ZDT3'
class ZDT4(ZDT1):
""" Problem ZDT4.
.. note:: Bi-objective unconstrained problem. The default number of variables is 10.
.. note:: Continuous multi-modal problem having a convex Pareto front
"""
def __init__(self, number_of_variables: int=10):
""" :param number_of_variables: Number of decision variables of the problem.
"""
super(ZDT4, self).__init__(number_of_variables=number_of_variables)
self.lower_bound = self.number_of_variables * [-5.0]
self.upper_bound = self.number_of_variables * [5.0]
self.lower_bound[0] = 0.0
self.upper_bound[0] = 1.0
def eval_g(self, solution: FloatSolution):
g = 0.0
for i in range(1, solution.number_of_variables):
g += pow(solution.variables[i], 2.0) - 10.0 * cos(4.0 * pi * solution.variables[i])
g += 1.0 + 10.0 * (solution.number_of_variables - 1)
return g
def eval_h(self, f: float, g: float) -> float:
return 1.0 - sqrt(f / g)
def get_name(self):
return 'ZDT4'
class ZDT6(ZDT1):
""" Problem ZDT6.
.. note:: Bi-objective unconstrained problem. The default number of variables is 10.
.. note:: Continuous problem having a non-convex Pareto front
"""
def __init__(self, number_of_variables: int=10):
""" :param number_of_variables: Number of decision variables of the problem.
"""
super(ZDT6, self).__init__(number_of_variables=number_of_variables)
def eval_g(self, solution: FloatSolution):
g = sum(solution.variables) - solution.variables[0]
g = g / (solution.number_of_variables - 1)
g = pow(g, 0.25)
g = 9.0 * g
g = 1.0 + g
return g
def eval_h(self, f: float, g: float) -> float:
return 1.0 - pow(f / g, 2.0)
def get_name(self):
return 'ZDT6'
```
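A quick numerical check (a sketch, not library code): on the ZDT1 Pareto front the tail variables are all zero, so `g == 1` and the second objective reduces to `1 - sqrt(f1)`.
```python
from jmetal.problem.multiobjective.zdt import ZDT1

problem = ZDT1()
solution = problem.create_solution()
# First variable on the front, all tail variables zero -> g evaluates to 1.
solution.variables = [0.25] + [0.0] * (problem.number_of_variables - 1)
problem.evaluate(solution)
print(solution.objectives)  # ~[0.25, 0.5], since 1 - sqrt(0.25) = 0.5
```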
#### File: problem/singleobjective/unconstrained.py
```python
import math
import random
from jmetal.core.problem import BinaryProblem, FloatProblem
from jmetal.core.solution import BinarySolution, FloatSolution
"""
.. module:: unconstrained
:platform: Unix, Windows
:synopsis: Unconstrained test problems for single-objective optimization
.. moduleauthor:: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>
"""
class OneMax(BinaryProblem):
def __init__(self, number_of_bits: int = 256):
super(OneMax, self).__init__()
self.number_of_bits = number_of_bits
self.number_of_objectives = 1
self.number_of_variables = 1
self.number_of_constraints = 0
self.obj_directions = [self.MINIMIZE]
self.obj_labels = ['Ones']
def evaluate(self, solution: BinarySolution) -> BinarySolution:
counter_of_ones = 0
for bits in solution.variables[0]:
if bits:
counter_of_ones += 1
solution.objectives[0] = -1.0 * counter_of_ones
return solution
def create_solution(self) -> BinarySolution:
new_solution = BinarySolution(number_of_variables=1, number_of_objectives=1)
new_solution.variables[0] = \
[True if random.randint(0, 1) == 0 else False for _ in range(self.number_of_bits)]
return new_solution
def get_name(self) -> str:
return 'OneMax'
class Sphere(FloatProblem):
def __init__(self, number_of_variables: int = 10):
super(Sphere, self).__init__()
self.number_of_objectives = 1
self.number_of_variables = number_of_variables
self.number_of_constraints = 0
self.obj_directions = [self.MINIMIZE]
self.obj_labels = ['f(x)']
self.lower_bound = [-5.12 for _ in range(number_of_variables)]
self.upper_bound = [5.12 for _ in range(number_of_variables)]
FloatSolution.lower_bound = self.lower_bound
FloatSolution.upper_bound = self.upper_bound
def evaluate(self, solution: FloatSolution) -> FloatSolution:
total = 0.0
for x in solution.variables:
total += x * x
solution.objectives[0] = total
return solution
def get_name(self) -> str:
return 'Sphere'
class Rastrigin(FloatProblem):
def __init__(self, number_of_variables: int = 10):
super(Rastrigin, self).__init__()
self.number_of_objectives = 1
self.number_of_variables = number_of_variables
self.number_of_constraints = 0
self.obj_directions = [self.MINIMIZE]
self.obj_labels = ['f(x)']
self.lower_bound = [-5.12 for _ in range(number_of_variables)]
self.upper_bound = [5.12 for _ in range(number_of_variables)]
FloatSolution.lower_bound = self.lower_bound
FloatSolution.upper_bound = self.upper_bound
def evaluate(self, solution: FloatSolution) -> FloatSolution:
a = 10.0
result = a * solution.number_of_variables
x = solution.variables
for i in range(solution.number_of_variables):
result += x[i] * x[i] - a * math.cos(2 * math.pi * x[i])
solution.objectives[0] = result
return solution
def get_name(self) -> str:
return 'Rastrigin'
class SubsetSum(BinaryProblem):
def __init__(self, C: int, W: list):
""" The goal is to find a subset S of W whose elements sum is closest to (without exceeding) C.
:param C: Large integer.
:param W: Set of non-negative integers."""
super(SubsetSum, self).__init__()
self.C = C
self.W = W
self.number_of_bits = len(self.W)
self.number_of_objectives = 1
self.number_of_variables = 1
self.number_of_constraints = 0
self.obj_directions = [self.MAXIMIZE]
self.obj_labels = ['Sum']
def evaluate(self, solution: BinarySolution) -> BinarySolution:
total_sum = 0.0
for index, bits in enumerate(solution.variables[0]):
if bits:
total_sum += self.W[index]
if total_sum > self.C:
total_sum = self.C - total_sum * 0.1
if total_sum < 0.0:
total_sum = 0.0
solution.objectives[0] = -1.0 * total_sum
return solution
def create_solution(self) -> BinarySolution:
new_solution = BinarySolution(number_of_variables=self.number_of_variables,
number_of_objectives=self.number_of_objectives)
new_solution.variables[0] = \
[True if random.randint(0, 1) == 0 else False for _ in range(self.number_of_bits)]
return new_solution
def get_name(self) -> str:
return 'Subset Sum'
```
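An illustrative sketch of evaluating `OneMax` on a hand-built bit string; the objective comes back negated because the framework minimizes by default.
```python
from jmetal.problem.singleobjective.unconstrained import OneMax

problem = OneMax(number_of_bits=8)
solution = problem.create_solution()
solution.variables[0] = [True, False, True, True, False, False, True, True]
problem.evaluate(solution)
print(solution.objectives[0])  # -5.0: five ones, negated for minimization
```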
#### File: jmetal/util/ckecking.py
```python
class NoneParameterException(Exception):
def __init__(self, message: str = ""):
self.error_message = message
class InvalidConditionException(Exception):
def __init__(self, message: str):
self.error_message = message
class EmptyCollectionException(RuntimeError):
def __init__(self):
super(EmptyCollectionException, self).__init__("The collection is empty")
# class InvalidConditionException(RuntimeError):
# def __init__(self, message):
# super(InvalidConditionException, self).__init__(message)
class InvalidProbabilityValueException(RuntimeError):
def __init__(self, value: float):
super(InvalidProbabilityValueException, self).__init__(
"The parameter " + str(value) + " is not a valid probability value")
class ValueOutOfRangeException(RuntimeError):
def __init__(self, value: float, lowest_value: float, highest_value: float):
super(ValueOutOfRangeException, self).__init__(
"The parameter " + str(value) + " is not in the range (" + str(lowest_value) + ", " + str(
highest_value) + ")")
class Check:
@staticmethod
def is_not_none(obj):
if obj is None:
raise NoneParameterException()
@staticmethod
def probability_is_valid(value: float):
if value < 0.0 or value > 1.0:
raise InvalidProbabilityValueException(value)
@staticmethod
def value_is_in_range(value: float, lowest_value: float, highest_value: float):
if value < lowest_value or value > highest_value:
raise ValueOutOfRangeException(value, lowest_value, highest_value)
@staticmethod
def collection_is_not_empty(collection):
if len(collection) == 0:
            raise EmptyCollectionException()
@staticmethod
def that(expression: bool, message: str):
if not expression:
raise InvalidConditionException(message)
"""
class Check:
@staticmethod
def is_not_null(o: object, message: str = ""):
if o is None:
raise NoneParameterException(message)
@staticmethod
def that(expression: bool, message: str = ""):
if not expression:
raise InvalidConditionException(message)
"""
```
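A minimal demonstration of the `Check` helpers above (a sketch; note the module's in-repo name really is `ckecking`):
```python
from jmetal.util.ckecking import Check, InvalidConditionException

Check.probability_is_valid(0.9)    # passes silently
Check.value_is_in_range(5, 1, 10)  # passes silently
try:
    Check.that(1 > 2, "1 is not greater than 2")
except InvalidConditionException as e:
    print(e.error_message)         # -> 1 is not greater than 2
```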
#### File: util/test/test_distance.py
```python
import unittest
from jmetal.util.distance import EuclideanDistance
class EuclideanDistanceTestCases(unittest.TestCase):
def test_should_get_distance_work_properly_case_1(self):
""" Case 1: [1], [1] -> distance == 0 """
distance = EuclideanDistance()
self.assertEqual(0, distance.get_distance([1], [1]))
def test_should_get_distance_work_properly_case_2(self):
""" Case 2: [1, 0, 0], [0, 1, 0] -> distance == 1.4142135623730951 """
distance = EuclideanDistance()
self.assertEqual(1.4142135623730951, distance.get_distance([1, 0, 0], [0, 1, 0]))
def test_should_get_distance_work_properly_case_3(self):
""" Case 3: [1, 1, 0], [0, 1, 0] -> distance == 1.0 """
distance = EuclideanDistance()
self.assertEqual(1.0, distance.get_distance([1, 1, 0], [0, 1, 0]))
"""
class CosineDistanceTestCases(unittest.TestCase):
def test_should_identical_points_have_a_distance_of_zero(self):
reference_point = [0.0, 0.0]
distance = CosineDistance(reference_point)
self.assertEqual(0.0, distance.get_distance([1.0, 1.0], [1.0, 1.0]))
def test_should_points_in_the_same_direction_have_a_distance_of_zero(self):
reference_point = [0.0, 0.0]
distance = CosineDistance(reference_point)
self.assertEqual(0.0, distance.get_distance([1.0, 1.0], [2.0, 2.0]))
def test_should_two_perpendicular_points_have_a_distance_of_one(self):
reference_point = [0.0, 0.0]
distance = CosineDistance(reference_point)
self.assertEqual(1.0, distance.get_distance([0.0, 1.0], [1.0, 0.0]))
"""
if __name__ == '__main__':
unittest.main()
```
#### File: util/test/test_neighborhood.py
```python
import unittest
import numpy
from jmetal.core.solution import Solution
from jmetal.util.ckecking import NoneParameterException, InvalidConditionException
from jmetal.util.neighborhood import WeightVectorNeighborhood, TwoDimensionalMesh, L5
class WeightVectorNeighborhoodTestCases(unittest.TestCase):
def test_should_constructor_work_properly(self) -> None:
number_of_weight_vectors = 100
neighborhood_size = 20
neighborhood: WeightVectorNeighborhood = WeightVectorNeighborhood(number_of_weight_vectors, neighborhood_size)
self.assertEqual(number_of_weight_vectors, neighborhood.number_of_weight_vectors)
self.assertEqual(neighborhood_size, neighborhood.neighborhood_size)
self.assertEqual(2, neighborhood.weight_vector_size)
self.assertEqual(0.0, neighborhood.weight_vectors[0][0])
self.assertEqual(1.0, neighborhood.weight_vectors[0][1])
self.assertEqual(0.0101010101010101010101, neighborhood.weight_vectors[1][0])
self.assertEqual(0.989898989898989898, neighborhood.weight_vectors[1][1])
self.assertEqual(1.0, neighborhood.weight_vectors[99][0])
self.assertEqual(0.0, neighborhood.weight_vectors[99][1])
self.assertTrue(numpy.array_equal(numpy.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]),
neighborhood.neighborhood[0]))
self.assertTrue(numpy.array_equal(numpy.array([69, 70, 68, 71, 67, 72, 66, 73, 65, 64, 74, 75, 63, 76, 62, 77, 61, 78, 60, 79]),
neighborhood.neighborhood[69]))
def test_should_get_neighbors_work_properly_with_two_objectives(self):
number_of_weight_vectors = 100
neighborhood_size = 20
neighborhood: WeightVectorNeighborhood = WeightVectorNeighborhood(number_of_weight_vectors, neighborhood_size)
solution_list = [Solution(2, 2) for _ in range(number_of_weight_vectors)]
neighbors = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(neighborhood_size, len(neighbors))
self.assertTrue(solution_list[0] == neighbors[0])
self.assertTrue(solution_list[19] == neighbors[19])
neighbors = neighborhood.get_neighbors(69, solution_list)
self.assertEqual(neighborhood_size, len(neighbors))
self.assertTrue(solution_list[69] == neighbors[0])
self.assertTrue(solution_list[79] == neighbors[19])
class TwoDimensionalMeshTestCases(unittest.TestCase):
def test_should_get_neighbors_throw_an_exception_if_the_solution_list_is_none(self):
"""
Topology:
north = -1, 0
south = 1, 0
east = 0, 1
west = 0, -1
:return:
"""
neighborhood = TwoDimensionalMesh(3, 3, [[-1, 0], [1, 0], [0, 1], [0, -1]])
with self.assertRaises(NoneParameterException):
neighborhood.get_neighbors(0, None)
def test_should_get_neighbors_throw_an_exception_if_the_solution_list_is_empty(self):
"""
Topology:
north = -1, 0
south = 1, 0
east = 0, 1
west = 0, -1
"""
neighborhood = TwoDimensionalMesh(3, 3, [[-1, 0], [1, 0], [0, 1], [0, -1]])
with self.assertRaises(InvalidConditionException):
neighborhood.get_neighbors(0, [])
def test_should_get_neighbors_return_four_neighbors_case1(self):
"""
Case 1
Solution list:
0 1 2
3 4 5
6 7 8
The solution location is 1, so the neighborhood is 7, 0, 2, 4
"""
rows = 3
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(1, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[7] in result)
self.assertTrue(solution_list[0] in result)
self.assertTrue(solution_list[2] in result)
self.assertTrue(solution_list[4] in result)
def test_should_get_neighbors_return_four_neighbors_case2(self):
"""
Case 2
Solution list:
0 1 2
3 4 5
6 7 8
The solution location is 4, so the neighborhood is 1, 3, 5, 7
"""
rows = 3
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(4, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[1] in result)
self.assertTrue(solution_list[3] in result)
self.assertTrue(solution_list[5] in result)
self.assertTrue(solution_list[7] in result)
def test_should_get_neighbors_return_four_neighbors_case3(self):
"""
Case 3
Solution list:
0 1 2
3 4 5
6 7 8
The solution location is 0, so the neighborhood is 1, 3, 2, 6
"""
rows = 3
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[1] in result)
self.assertTrue(solution_list[3] in result)
self.assertTrue(solution_list[2] in result)
self.assertTrue(solution_list[6] in result)
def test_should_get_neighbors_return_four_neighbors_case4(self):
"""
Case 4
Solution list:
0 1 2
3 4 5
6 7 8
The solution location is 2, so the neighborhood is 1, 5, 8, 0
"""
rows = 3
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(2, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[1] in result)
self.assertTrue(solution_list[5] in result)
self.assertTrue(solution_list[8] in result)
self.assertTrue(solution_list[0] in result)
def test_should_get_neighbors_return_four_neighbors_case5(self):
"""
Case 5
Solution list:
0 1 2
3 4 5
6 7 8
The solution location is 8, so the neighborhood is 2, 5, 6, 7
"""
rows = 3
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(8, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[2] in result)
self.assertTrue(solution_list[5] in result)
self.assertTrue(solution_list[6] in result)
self.assertTrue(solution_list[7] in result)
def test_should_get_neighbors_return_four_neighbors_case6(self):
"""
Case 6
Solution list:
0 1 2
3 4 5
The solution location is 0, so the neighborhood is 1, 3, 3, 2
"""
rows = 2
columns = 3
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = TwoDimensionalMesh(rows, columns, [[-1, 0], [1, 0], [0, 1], [0, -1]])
result = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[1] in result)
self.assertTrue(solution_list[3] in result)
self.assertTrue(solution_list[2] in result)
class L5TestCases(unittest.TestCase):
def test_should_get_neighbors_return_four_neighbors_case1(self):
rows = 1
columns = 1
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = L5(rows, columns)
result = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(4, len(result))
def test_should_get_neighbors_return_four_neighbors_case2(self):
"""
Solution list: 0, 1
Solution location: 0; the neighborhood is: 0, 1
"""
rows = 1
columns = 2
solution_list = []
for i in range(rows * columns):
solution = Solution(i, 2)
solution.variables = [i, i+1]
solution_list.append(solution)
neighborhood = L5(rows, columns)
result = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[0] in result)
self.assertTrue(solution_list[1] in result)
self.assertEqual(2, result.count(solution_list[0]))
self.assertEqual(2, result.count(solution_list[1]))
def test_should_get_neighbors_return_four_neighbors_case3(self):
"""
Solution list: 0, 1
Solution location: 1; the neighborhood is: 0, 1
"""
rows = 1
columns = 2
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = L5(rows, columns)
result = neighborhood.get_neighbors(1, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[0] in result)
self.assertTrue(solution_list[1] in result)
self.assertEqual(2, result.count(solution_list[0]))
self.assertEqual(2, result.count(solution_list[1]))
def test_should_get_neighbors_return_four_neighbors_case4(self):
"""
Solution list:
0 1
2 3
Solution location: 0; the neighborhood is: 1, 2
"""
rows = 2
columns = 2
solution_list = [Solution(i, 2) for i in range(rows * columns)]
neighborhood = L5(rows, columns)
result = neighborhood.get_neighbors(0, solution_list)
self.assertEqual(4, len(result))
self.assertTrue(solution_list[1] in result)
self.assertTrue(solution_list[2] in result)
self.assertTrue(solution_list[3] not in result)
self.assertTrue(solution_list[0] not in result)
self.assertEqual(2, result.count(solution_list[1]))
self.assertEqual(2, result.count(solution_list[2]))
if __name__ == '__main__':
unittest.main()
```
#### File: util/test/test_replacement.py
```python
import unittest
from jmetal.core.solution import Solution
from jmetal.util.density_estimator import KNearestNeighborDensityEstimator
from jmetal.util.ranking import StrengthRanking, FastNonDominatedRanking
from jmetal.util.replacement import RankingAndDensityEstimatorReplacement
class RankingAndDensityEstimatorReplacementTestCases(unittest.TestCase):
def test_should_replacement_return_the_list_if_the_offspring_list_is_empty(self):
"""
5 1
4 2
3 3
2
1 4
0 1 2 3 4 5
"""
ranking = StrengthRanking()
density_estimator = KNearestNeighborDensityEstimator(1)
replacement = RankingAndDensityEstimatorReplacement(ranking, density_estimator)
solution1 = Solution(2, 2)
solution1.objectives = [1, 5]
solution2 = Solution(2, 2)
solution2.objectives = [2, 4]
solution3 = Solution(2, 2)
solution3.objectives = [3, 3]
solution4 = Solution(2, 2)
solution4.objectives = [5, 1]
solution_list = [solution1, solution2, solution3, solution4]
result_list = replacement.replace(solution_list, [])
self.assertEqual(4, len(result_list))
self.assertEqual(0, solution1.attributes['strength_ranking'])
self.assertEqual(0, solution2.attributes['strength_ranking'])
self.assertEqual(0, solution3.attributes['strength_ranking'])
self.assertEqual(0, solution4.attributes['strength_ranking'])
def test_should_replacement_return_the_right_value_case1(self):
"""
5 1
4 2
3 3
2
1 4
0 1 2 3 4 5
List: 1,2,3 OffspringList: 4
Expected result: 4, 1, 3
"""
ranking = StrengthRanking()
density_estimator = KNearestNeighborDensityEstimator(1)
replacement = RankingAndDensityEstimatorReplacement(ranking, density_estimator)
solution1 = Solution(2, 2)
solution1.objectives = [1, 5]
solution2 = Solution(2, 2)
solution2.objectives = [2, 4]
solution3 = Solution(2, 2)
solution3.objectives = [3, 3]
solution4 = Solution(2, 2)
solution4.objectives = [5, 1]
solution_list = [solution1, solution2, solution3]
offspring_list = [solution4]
result_list = replacement.replace(solution_list, offspring_list)
self.assertEqual(3, len(result_list))
self.assertTrue(solution1 in result_list)
self.assertTrue(solution3 in result_list)
self.assertTrue(solution4 in result_list)
def test_should_replacement_return_the_right_value_case2(self):
"""
5 1
4 2
3 3
2 5
1 4
0 1 2 3 4 5
List: 1,2,4 OffspringList: 3,5
Expected result: 1, 5, 4
"""
ranking = StrengthRanking()
density_estimator = KNearestNeighborDensityEstimator(1)
replacement = RankingAndDensityEstimatorReplacement(ranking, density_estimator)
solution1 = Solution(2, 2)
solution1.objectives = [1, 5]
solution2 = Solution(2, 2)
solution2.objectives = [2, 4]
solution3 = Solution(2, 2)
solution3.objectives = [3, 3]
solution4 = Solution(2, 2)
solution4.objectives = [5, 1]
solution5 = Solution(2, 2)
solution5.objectives = [2.5, 2.5]
solution_list = [solution1, solution2, solution4]
offspring_list = [solution3, solution5]
result_list = replacement.replace(solution_list, offspring_list)
self.assertEqual(0, solution1.attributes['strength_ranking'])
self.assertEqual(0, solution2.attributes['strength_ranking'])
self.assertEqual(1, solution3.attributes['strength_ranking'])
self.assertEqual(0, solution4.attributes['strength_ranking'])
self.assertEqual(0, solution5.attributes['strength_ranking'])
self.assertEqual(3, len(result_list))
self.assertTrue(solution1 in result_list)
self.assertTrue(solution5 in result_list)
self.assertTrue(solution4 in result_list)
def test_should_replacement_return_the_right_value_case3(self):
"""
"""
points_population = [[0.13436424411240122, 4.323216008886963],
[0.23308445025757263, 4.574937990387161],
[0.17300740157905092, 4.82329350808847],
[0.9571162814602269, 3.443495331489301],
[0.25529404008730594, 3.36387501100745],
[0.020818108509287336, 5.1051826661880515],
[0.8787178982088466, 3.2716009445324103],
[0.6744550697237632, 3.901350307095427],
[0.7881164487252263, 3.1796004913916516],
[0.1028341459863098, 4.9409270526888935]]
points_offspring_population = [[0.3150521745650882, 4.369120371847888],
[0.8967291504209932, 2.506948771242972],
[0.6744550697237632, 3.9361442668874504],
[0.9571162814602269, 3.4388386707431433],
[0.13436424411240122, 4.741872175943253],
[0.25529404008730594, 2.922302861104415],
[0.23308445025757263, 4.580180404770213],
[0.23308445025757263, 4.591260299892424],
[0.9571162814602269, 2.9865495383518694],
[0.25529404008730594, 3.875587748122183]]
ranking = FastNonDominatedRanking()
density_estimator = KNearestNeighborDensityEstimator(1)
population = []
for i in range(len(points_population)):
population.append(Solution(2, 2))
population[i].objectives = points_population[i]
offspring_population = []
for i in range(len(points_offspring_population)):
offspring_population.append(Solution(2, 2))
offspring_population[i].objectives = points_offspring_population[i]
replacement = RankingAndDensityEstimatorReplacement(ranking, density_estimator)
result_list = replacement.replace(population, offspring_population)
self.assertEqual(10,len(result_list))
for solution in result_list[0:4]:
self.assertEqual(0, solution.attributes['dominance_ranking'])
for solution in result_list[5:9]:
self.assertEqual(1, solution.attributes['dominance_ranking'])
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "12yuens2/reconfigurable_organisms",
"score": 2
} |
#### File: reconfigurable_organisms/data_analysis/Time_in_contact_with_ground.py
```python
import cPickle
from glob import glob
import subprocess as sub
import numpy as np
import pandas as pd
from softbot import Genotype, Phenotype
from base import Env
from tools.read_write_voxelyze import write_voxelyze_file
from tools.utils import quadruped
np.random.seed(1)
EXP_NAME = "XENO_6"
PICKLE_DIR = "/home/sam/Projects/research_code/evosoro/data_analysis/results"
SEND_ROBOTS_TO_SIM = False
COLLECT_FITNESS_FILES = False
LOAD_DF_FROM_PICKLE = True
TIME_BETWEEN_TRACES = 0.01
GEN = 1000
start_run = 1
RUNS = 100
IND_SIZE = (8, 8, 7)
STIFFNESS = 5e6
INIT_TIME = 1
SIM_TIME = 10.0 + INIT_TIME # includes init time
TEMP_AMP = 39.4714242553 # 50% volumetric change with temp_base=25: (1+0.01*(39.4714242553-25))**3-1=0.5
FREQ = 2
DT_FRAC = 0.9
VOXEL_SIZE = 0.05 # meters
GRAV_ACC = -0.1
DRAW_SHADOW = True # todo
FLUID_ENV = 1 # if 1 drag forces are added
RHO_FLUID = 1000.0 # water density
C_DRAG = 1.5 # fluid drag associated to a triangular facet
AGGREGATE_DRAG_COEF = 0.5 * C_DRAG * RHO_FLUID # aggregate drag coefficient
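# xeno_quad: map a network output onto the quadruped voxel grid; positive outputs
# become muscle (material 3), the rest passive tissue (1), and voxels outside the
# quadruped mask stay empty (0).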
def xeno_quad(output_state):
shape = quadruped((8, 8, 7), mat=1)
mat = np.greater(output_state, 0)*3
mat[mat == 0] = 1
mat[shape == 0] = 0
return mat
if SEND_ROBOTS_TO_SIM:
sub.call("mkdir {0}/GroundPenetration_{1}".format(PICKLE_DIR, EXP_NAME), shell=True)
for run in range(start_run, RUNS + 1):
sub.call("mkdir {0}/GroundPenetration_{1}/Run_{2}".format(PICKLE_DIR, EXP_NAME, run), shell=True)
sub.call("mkdir {0}/GroundPenetration_{1}/Run_{2}/voxelyzeFiles && "
"mkdir {0}/GroundPenetration_{1}/Run_{2}/fitnessFiles".format(PICKLE_DIR, EXP_NAME, run),
shell=True)
MyPhenotype = Phenotype
MyGenotype = Genotype
orig_fit_dict = {}
# for exp_name in EXP_NAMES:
for run in range(start_run, RUNS+1):
# clear directories
sub.call("rm {0}/GroundPenetration_{1}/Run_{2}/voxelyzeFiles/*".format(PICKLE_DIR, EXP_NAME, run), shell=True)
sub.call("rm {0}/GroundPenetration_{1}/Run_{2}/fitnessFiles/*".format(PICKLE_DIR, EXP_NAME, run), shell=True)
pickle = "{0}/Exp_{1}/Run_{2}/pickledPops/Gen_{3}.pickle".format(PICKLE_DIR, EXP_NAME, run, GEN)
with open(pickle, 'rb') as handle:
[optimizer, random_state, numpy_random_state] = cPickle.load(handle)
optimizer.sim.simulation_time = SIM_TIME
# load current population from pickle
pop = optimizer.pop
# get the current run champion
best_ind = None
best_fit_so_far = 0
for ind in pop:
# if ind.fitness == pop.best_fit_so_far:
# best_ind = ind
if ind.fitness > best_fit_so_far:
best_ind = ind
best_fit_so_far = ind.fitness
orig_fit_dict[run] = best_fit_so_far
my_env = Env(temp_amp=TEMP_AMP, fluid_environment=FLUID_ENV, aggregate_drag_coefficient=AGGREGATE_DRAG_COEF,
lattice_dimension=VOXEL_SIZE, grav_acc=GRAV_ACC, frequency=FREQ, muscle_stiffness=STIFFNESS,
time_between_traces=TIME_BETWEEN_TRACES)
# this time save traces (if TIME_BETWEEN_TRACES > 0)
my_env.time_between_traces = TIME_BETWEEN_TRACES
save_dir = "{0}/GroundPenetration_{1}/Run_{2}/".format(PICKLE_DIR, EXP_NAME, run) # voxelyzeFiles
write_voxelyze_file(optimizer.sim, my_env, best_ind, save_dir, "GroundPenetration")
# evaluate all robots in simulator #
for run in range(start_run, RUNS + 1):
robots = "{0}/GroundPenetration_{1}/Run_{2}/voxelyzeFiles/*".format(PICKLE_DIR, EXP_NAME, run)
for vxa in glob(robots):
print "sending robot {} to the simulator".format(run)
sub.Popen("/home/sam/Projects/research_code/evosoro/_voxcad/voxelyzeMain/voxelyze -f " + vxa, shell=True)
if COLLECT_FITNESS_FILES:
ground_touch_dict = {
run: {"Time": [], "TraceX": [], "TraceY": [], "TraceZ": [], "NumTouchingGround": []}
for run in range(1, RUNS + 1)
}
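# Build (open-tag, key) pairs so each fitness-file line can be scanned for the trace fields above.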
all_tag_keys = [("<" + k + ">", k) for k, v in ground_touch_dict[1].items()]
for run in range(start_run, RUNS+1):
# print "collecting data for robot", run
robots = glob("{0}/GroundPenetration_{1}/Run_{2}/fitnessFiles/*".format(PICKLE_DIR, EXP_NAME, run))
for robot in robots:
name = int(robot[robot.find("id_")+3:-4])
this_robot = open(robot)
for line in this_robot:
for tag, key in all_tag_keys:
if tag in line:
ground_touch_dict[run][key] += [float(line[line.find(tag) + len(tag):line.find("</" + tag[1:])])]
with open('{0}/{1}_GroundPenetration_Dict.pickle'.format(PICKLE_DIR, EXP_NAME), 'wb') as handle:
cPickle.dump(ground_touch_dict, handle, protocol=cPickle.HIGHEST_PROTOCOL)
df = pd.DataFrame()
robot = []
for run in range(start_run, RUNS+1):
this_df = pd.DataFrame.from_dict(ground_touch_dict[run])
robot += [run]*len(this_df)
df = pd.concat([df, this_df])
df['robot'] = robot
df.to_pickle('{0}/{1}_GroundPenetration_DataFrame.pickle'.format(PICKLE_DIR, EXP_NAME))
if LOAD_DF_FROM_PICKLE:
with open('{0}/{1}_GroundPenetration_DataFrame.pickle'.format(PICKLE_DIR, EXP_NAME), 'rb') as handle:
df = cPickle.load(handle)
# print df
penetration = df["NumTouchingGround"]
X, Y, Z = df["TraceX"], df["TraceY"], df["TraceZ"]
time = df["Time"]
t = (np.array(time) - np.min(time)) / (np.max(time) - np.min(time))
# df = df[t > 0.25]
c = []
for n in range(1, 100):
c += [np.sum(df.loc[df.robot==n, "NumTouchingGround"]>0) / float(len(df.loc[df.robot==1, "NumTouchingGround"]))]
print np.mean(c)
```
#### File: reconfigurable_organisms/data_analysis/trajectories.py
```python
import cPickle
import numpy as np
import os.path
import subprocess as sub
from softbot import Genotype, Phenotype
from tools.utils import quadruped
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib
import matplotlib.cm as cm
import matplotlib.colors as colors
from mpl_toolkits.mplot3d import Axes3D
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
sns.set(color_codes=True, context="poster")
sns.set_style("white", {'font.family': 'serif', 'font.serif': 'Times New Roman'})
cc = ["light red", "cyan", "apricot"]
sns.set_palette(sns.xkcd_palette(cc), desat=.9)
GEN = 1000
EXP_NAME = "XENO_3"
RUN = 2 # 16, 52, 6, 2
AZIM = -60 # -120, -120, -90, -60
PICKLE_DIR = "/home/sam/Projects/research_code/evosoro/data_analysis/results/{0}_Gen_{1}".format(EXP_NAME, GEN)
MyGenotype = Genotype
MyPhenotype = Phenotype
# Traces
trace_pickle = "/home/sam/Projects/research_code/evosoro/data_analysis/results/{}_Trace_DataFrame.pickle".format(EXP_NAME)
with open(trace_pickle, 'rb') as handle:
traces_df = cPickle.load(handle)
# spun traces
spun_trace_pickle = "/home/sam/Projects/research_code/evosoro/data_analysis/results/{}_Trace_DataFrame_Spun.pickle".format(EXP_NAME)
with open(spun_trace_pickle, 'rb') as handle:
spun_traces_df = cPickle.load(handle)
all_time = spun_traces_df["Time"]
min_time = np.min(all_time)
max_time = np.max(all_time)
minX, maxX = spun_traces_df["TraceX"].min(), spun_traces_df["TraceX"].max()
minY, maxY = spun_traces_df["TraceY"].min(), spun_traces_df["TraceY"].max()
minZ, maxZ = spun_traces_df["TraceZ"].min(), spun_traces_df["TraceZ"].max()
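# get_orientation buckets the final heading angle of a trace into quarter turns
# (the k argument for numpy.rot90) so every plotted body faces the same way.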
def get_orientation(x, y):
a = np.arctan2(y[-1], x[-1])
# print a
if -np.pi < a <= -3*np.pi/4.0:
# print y[-1], x[-1]
return 2 # looks good
if 3*np.pi/4.0 < a <= np.pi:
# print y[-1], x[-1]
return 2 # so this is good
if -3*np.pi/4.0 < a <= -np.pi/4.0:
# print y[-1], x[-1]
return 1
if np.pi/4.0 < a <= 3*np.pi/4.0:
# print y[-1], x[-1]
return -1 # correct
if -np.pi/4.0 < a <= np.pi/4.0:
# print y[-1], x[-1]
# print a
if a < 0: # -np.pi/8.0:
# print "adjust +"
return 1 # adjust for plotting angle
# if a > np.pi/5.0:
# print "adjust -"
# return -1
return 0
print " whoops"
def xeno_quad(output_state):
shape = quadruped((8, 8, 7), mat=1)
mat = np.greater(output_state, 0)*3
mat[mat == 0] = 1
mat[shape == 0] = 0
return mat
fig = plt.figure()
pickle = "{3}/Exp_{0}_Run_{1}_Gen_{2}.pickle".format(EXP_NAME, RUN, GEN, PICKLE_DIR)
with open(pickle, 'rb') as handle:
[optimizer, random_state, numpy_random_state] = cPickle.load(handle)
pop = optimizer.pop
best_ind = None
best_fit_so_far = 0
for n, ind in enumerate(pop):
if ind.fitness > best_fit_so_far:
best_ind = ind
best_fit_so_far = ind.fitness
size = best_ind.genotype.orig_size_xyz
this_trace = traces_df[traces_df["robot"] == RUN]
spin = get_orientation(this_trace["TraceX"].values, this_trace["TraceY"].values)
this_spun_trace = spun_traces_df[traces_df["robot"] == RUN]
thisX, thisY, thisZ = this_spun_trace["TraceX"], this_spun_trace["TraceY"], this_spun_trace["TraceZ"]
# print spin
for name, details in best_ind.genotype.to_phenotype_mapping.items():
if name == "material":
shape = details["state"]
shape = np.rot90(shape, k=spin, axes=(0, 1))
ax = fig.add_subplot(1, 1, 1, projection='3d')
# print 'tracing behavior'
time = this_trace["Time"]
norm_time = (time-min_time)/(max_time-min_time)
# norm_time = [cm.jet(t) for t in norm_time]
normX = (thisX-minX)/(maxX-minX)
normY = (thisY-minY)/(maxY-minY)
normZ = (thisZ-minZ)/(maxZ-minZ)
this_maxX = normX.max()
this_maxY = normY.max()
this_minZ = normZ.min()
this_maxZ = normZ.max()
print this_minZ, this_maxZ
ax.scatter(normX, normY, normZ*0.25,
c=norm_time,
cmap="spring_r",
s=60,
alpha=0.5
)
# ax.quiver(normX.values[-1], normY.values[-1], normZ.values[-1],
# normX.values[-1]-normX.values[-2],
# normY.values[-1]-normY.values[-2],
# normZ.values[-1]-normZ.values[-2],
# length=25, arrow_length_ratio=2, pivot="tail", color=cm.spring_r(0.999999), linewidth=1)
ax.set_xlim([0, 1])
ax.set_ylim([0, 1])
ax.set_zlim([0, 1])
ax.set_aspect('equal')
ax.view_init(elev=0, azim=AZIM)
ax.set_axis_off()
# save it
# fig.subplots_adjust(wspace=-0.8, hspace=-0.1) # 100
# fig.subplots_adjust(wspace=-0.6, hspace=-0.1) # 25
bbox = fig.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
# dpi = 300*bbox.width/3.125
# dpi = 600*bbox.height/9.0
dpi = 300 # 900
print 'dpi = ', dpi
plt.savefig("plots/{0}_trace_{1}.png".format(EXP_NAME, RUN), bbox_inches='tight', dpi=int(dpi), transparent=True)
```
#### File: reconfigurable_organisms/exp/Algorithm_Analysis_EA.py
```python
import random
import os
import sys
import numpy as np
import subprocess as sub
from base import Sim, Env, ObjectiveDict
from networks import DirectEncoding
from softbot import Genotype, Phenotype, Population
from tools.algorithms import ParetoOptimization
from tools.checkpointing import continue_from_checkpoint
from tools.utils import muscle_fat, make_one_shape_only
sub.call("cp ~/tmp/research_code/evosoro/_voxcad/voxelyzeMain/voxelyze .", shell=True)
sub.call("chmod 755 voxelyze", shell=True)
SEED = int(sys.argv[1])
MAX_TIME = float(sys.argv[2])
IND_SIZE = (2, 2, 2)
VOXEL_SIZE = 0.05 # meters
STIFFNESS = 5e6
POP_SIZE = 50
MAX_GENS = 10000000
NUM_RANDOM_INDS = 1
INIT_TIME = 1
SIM_TIME = 10.0 + INIT_TIME # includes init time
TEMP_AMP = 39.4714242553 # 50% volumetric change with temp_base=25: (1+0.01*(39.4714242553-25))**3-1=0.5
FREQ = 2
DT_FRAC = 0.9 # 0.3
# Swimming Parameters
GRAV_ACC = -0.1
FLUID_ENV = 1 # if 1 drag forces are added
RHO_FLUID = 1000.0 # water density
C_DRAG = 1.5 # fluid drag associated to a triangular facet
AGGREGATE_DRAG_COEF = 0.5 * C_DRAG * RHO_FLUID # aggregate drag coefficient
TIME_TO_TRY_AGAIN = 10
MAX_EVAL_TIME = 25
SAVE_VXA_EVERY = MAX_GENS + 1
SAVE_LINEAGES = False
CHECKPOINT_EVERY = 1
EXTRA_GENS = 0
RUN_DIR = "run_{}".format(SEED)
RUN_NAME = "Swimmers"
def encoding(material):
material = np.reshape(material, IND_SIZE)
# mask = make_one_shape_only(material)
# new = np.array(mask)
# new[material == 1] *= 1
# new[material == 3] *= 3
if np.sum(material == 3) == 0:
new = np.zeros(IND_SIZE)
new[0, 0, 0] = 3
return material
class MyGenotype(Genotype):
def __init__(self):
Genotype.__init__(self, orig_size_xyz=IND_SIZE)
self.add_network(DirectEncoding(output_node_name="phase_offset", orig_size_xyz=IND_SIZE, symmetric=False),
freeze=True)
self.add_network(DirectEncoding(output_node_name="material", orig_size_xyz=IND_SIZE, func=encoding, scale=1,
symmetric=False, lower_bound=-100, upper_bound=100, vox_options=[0, 1, 3]))
self.to_phenotype_mapping.add_map(name="phase_offset", tag="<PhaseOffset>", logging_stats=None)
self.to_phenotype_mapping.add_map(name="material", tag="<Data>", output_type=int, logging_stats=None)
if not os.path.isfile("./" + RUN_DIR + "/pickledPops/Gen_0.pickle"):
random.seed(SEED)
np.random.seed(SEED)
# print np.sum(np.random.rand(*IND_SIZE)) # seed=1; 226.98967645847415
MyGenotype.NET_DICT = {"phase_offset": np.random.rand(*IND_SIZE)}
my_sim = Sim(dt_frac=DT_FRAC, simulation_time=SIM_TIME, fitness_eval_init_time=INIT_TIME)
my_env = Env(temp_amp=TEMP_AMP, fluid_environment=FLUID_ENV, aggregate_drag_coefficient=AGGREGATE_DRAG_COEF,
lattice_dimension=VOXEL_SIZE, grav_acc=GRAV_ACC, frequency=FREQ, muscle_stiffness=STIFFNESS)
my_objective_dict = ObjectiveDict()
my_objective_dict.add_objective(name="fitness", maximize=True, tag="<normAbsoluteDisplacement>")
my_objective_dict.add_objective(name="age", maximize=False, tag=None)
my_pop = Population(my_objective_dict, MyGenotype, Phenotype, pop_size=POP_SIZE)
my_optimization = ParetoOptimization(my_sim, my_env, my_pop)
my_optimization.run(max_hours_runtime=MAX_TIME, max_gens=MAX_GENS, num_random_individuals=NUM_RANDOM_INDS,
directory=RUN_DIR, name=RUN_NAME, max_eval_time=MAX_EVAL_TIME,
time_to_try_again=TIME_TO_TRY_AGAIN, checkpoint_every=CHECKPOINT_EVERY,
save_vxa_every=SAVE_VXA_EVERY, save_lineages=SAVE_LINEAGES)
else:
continue_from_checkpoint(directory=RUN_DIR, additional_gens=EXTRA_GENS, max_hours_runtime=MAX_TIME,
max_eval_time=MAX_EVAL_TIME, time_to_try_again=TIME_TO_TRY_AGAIN,
checkpoint_every=CHECKPOINT_EVERY, save_vxa_every=SAVE_VXA_EVERY,
save_lineages=SAVE_LINEAGES)
```
#### File: reconfigurable_organisms/exp/Object_Transport.py
```python
import random
import os
import sys
import numpy as np
import subprocess as sub
from functools import partial
from base import Sim, Env, ObjectiveDict
from networks import CPPN, DirectEncoding
from softbot import Genotype, Phenotype, Population
from tools.algorithms import ParetoOptimization
from tools.checkpointing import continue_from_checkpoint
from tools.utils import make_material_tree, count_occurrences
# sub.call("cp ../_voxcad/voxelyzeMain/voxelyze .", shell=True)
sub.call("cp ~/tmp/research_code/evosoro/_voxcad/voxelyzeMain/voxelyze .", shell=True)
sub.call("chmod 755 voxelyze", shell=True)
SEED = int(sys.argv[1])
MAX_TIME = float(sys.argv[2])
IND_SIZE = (10, 10, 9)
FITNESS_TAG = "<normAbsoluteDisplacement>"
# STOP_IF_BLOCK_TOUCHES_GROUND = True # check for ground penetration
MIN_PERCENT_FULL = 0.5
POP_SIZE = 50
MAX_GENS = 1001
NUM_RANDOM_INDS = 1
INIT_TIME = 1
# diff from main
SIM_TIME = 10.0 + INIT_TIME # was 10+init # includes init time
FREQ = 2
TEMP_AMP = 39.4714242553 # 50% volumetric change with temp_base=25: (1+0.01*(39.4714242553-25))**3-1=0.5
DT_FRAC = 0.9 # 0.3
STIFFNESS = 5e6
GRAV_ACC = -0.1
VOXEL_SIZE = 0.05
# DRAW_SHADOW = True # todo
FLUID_ENV = 1 # if 1 drag forces are added
RHO_FLUID = 1000.0 # water density
C_DRAG = 1.5 # fluid drag associated to a triangular facet
AGGREGATE_DRAG_COEF = 0.5 * C_DRAG * RHO_FLUID # aggregate drag coefficient
TIME_TO_TRY_AGAIN = 25
MAX_EVAL_TIME = 61
SAVE_VXA_EVERY = MAX_GENS + 1
SAVE_LINEAGES = False
CHECKPOINT_EVERY = 1
EXTRA_GENS = 0
RUN_DIR = "run_{}".format(SEED)
RUN_NAME = "AquaticBlockPushers"
def embedded_pill(this_softbot, *args, **kwargs):
mat = make_material_tree(this_softbot, *args, **kwargs)
mat[2:8, 2:8, 2:8] = 3
mat[3:7, 3:7, 3:7] = 0
mat[4:6, 4:6, 4:6] = 8
return mat
class MyGenotype(Genotype):
def __init__(self):
Genotype.__init__(self, orig_size_xyz=IND_SIZE)
self.add_network(DirectEncoding(output_node_name="phase_offset", orig_size_xyz=IND_SIZE, symmetric=False),
freeze=True)
self.to_phenotype_mapping.add_map(name="phase_offset", tag="<PhaseOffset>", logging_stats=None)
self.add_network(CPPN(output_node_names=["shape", "muscleOrTissue"]))
self.to_phenotype_mapping.add_map(name="material", tag="<Data>", func=embedded_pill, output_type=int,
dependency_order=["shape", "muscleOrTissue"], logging_stats=None)
self.to_phenotype_mapping.add_output_dependency(name="shape", dependency_name=None, requirement=None,
material_if_true=None, material_if_false="0")
self.to_phenotype_mapping.add_output_dependency(name="muscleOrTissue", dependency_name="shape",
requirement=True, material_if_true="3", material_if_false="1")
class MyPhenotype(Phenotype):
def is_valid(self, min_percent_full=MIN_PERCENT_FULL):
for name, details in self.genotype.to_phenotype_mapping.items():
if np.isnan(details["state"]).any():
return False
if name == "material":
state = details["state"]
num_vox = np.sum(state > 0)
if num_vox < np.product(self.genotype.orig_size_xyz) * min_percent_full:
return False
if np.sum(state == 3) == 0: # make sure has at least one muscle voxel for movement
return False
return True
if not os.path.isfile("./" + RUN_DIR + "/pickledPops/Gen_0.pickle"):
random.seed(SEED)
np.random.seed(SEED)
my_sim = Sim(dt_frac=DT_FRAC, simulation_time=SIM_TIME, fitness_eval_init_time=INIT_TIME)
my_env = Env(temp_amp=TEMP_AMP, fluid_environment=FLUID_ENV, aggregate_drag_coefficient=AGGREGATE_DRAG_COEF,
lattice_dimension=VOXEL_SIZE, grav_acc=GRAV_ACC, frequency=FREQ, muscle_stiffness=STIFFNESS,
fat_stiffness=STIFFNESS,
# block_position=2, block_material=8, external_block=False,
# falling_prohibited=STOP_IF_BLOCK_TOUCHES_GROUND
)
my_objective_dict = ObjectiveDict()
my_objective_dict.add_objective(name="fitness", maximize=True, tag=FITNESS_TAG)
my_objective_dict.add_objective(name="age", maximize=False, tag=None)
my_objective_dict.add_objective(name="n_muscle", maximize=False, tag=None,
node_func=partial(count_occurrences, keys=[3]),
output_node_name="material")
# logging only:
my_objective_dict.add_objective(name="n_vox", maximize=False, tag=None, logging_only=True,
node_func=partial(count_occurrences, keys=[1, 3]),
output_node_name="material")
my_pop = Population(my_objective_dict, MyGenotype, MyPhenotype, pop_size=POP_SIZE)
my_optimization = ParetoOptimization(my_sim, my_env, my_pop)
my_optimization.run(max_hours_runtime=MAX_TIME, max_gens=MAX_GENS, num_random_individuals=NUM_RANDOM_INDS,
directory=RUN_DIR, name=RUN_NAME, max_eval_time=MAX_EVAL_TIME,
time_to_try_again=TIME_TO_TRY_AGAIN, checkpoint_every=CHECKPOINT_EVERY,
save_vxa_every=SAVE_VXA_EVERY, save_lineages=SAVE_LINEAGES)
else:
continue_from_checkpoint(directory=RUN_DIR, additional_gens=EXTRA_GENS, max_hours_runtime=MAX_TIME,
max_eval_time=MAX_EVAL_TIME, time_to_try_again=TIME_TO_TRY_AGAIN,
checkpoint_every=CHECKPOINT_EVERY, save_vxa_every=SAVE_VXA_EVERY,
save_lineages=SAVE_LINEAGES)
``` |
{
"source": "12yy296/latin2cyrillic-alphabet-converter",
"score": 2
} |
#### File: 12yy296/latin2cyrillic-alphabet-converter/latin2cyrillic.py
```python
test_text="Aa Bb Cc Dd Ee Ff Gg Hh Ii Jj Kk Ll Mm Nn Oo Pp Qq Rr Ss Tt Uu Vv Ww Xx Yy Zz Zhzh Chch Shsh Yoyo Yaya "
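# Rules are applied in order, so multi-character sequences (whole words and
# digraphs like zh/sh/ch) must come before the single-letter fallbacks at the end.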
replace_table=[
["by","бай"],
["By","Бай"],
["The","Дзе"],
["the","дзе"],
["t'h","дз"],
["T'h","Дз"],
["too","ту"],
["Too","Ту"],
["to","ту"],
["To","Ту"],
["ss","с"],
["cc","кс"],
["Wh","У"],
["pp","п"],
["tt","т"],
["tion","шен"],
["ph","ф"],
["Ph","Ф"],
["q","ку"],
["Q","Ку"],
["zh","ж"],
["Zh","Ж"],
["sh","ш"],
["Sh","Ш"],
["ch","ч"],
["Ch","Ч"],
["you","ю"],
["You","Ю"],
["yo","ю"],
["Yo","Ю"],
["iu","ю"],
["iu","Ю"],
["I ","Я "],
["I'm","Я'м"],
["ya","я"],
["Ya","Я"],
["a","а"],["b","б"],["c","с"],["d","д"],["e","е"],["f","ф"],["g","г"],["h","х"],["i","и"],["j","ж"],["k","к"],["l","л"],["m","м"],["n","н"],["o","о"],["p","п"],["r","р"],["s","с"],["t","т"],["u","у"],["v","в"],["w","у"],["x","с"],["y","й"],["z","з"],["A","А"],["B","Б"],["C","С"],["D","Д"],["E","Е"],["F","Ф"],["G","Г"],["H","Х"],["I","И"],["J","Ж"],["K","К"],["L","Л"],["M","М"],["N","Н"],["O","О"],["P","П"],["R","Р"],["S","С"],["T","Т"],["U","У"],["V","В"],["W","У"],["X","С"],["Y","Й"],["Z","З"],
["''",""],
['"',""],
[" ",""]
]
def main():
cyrillic=input()
for group in replace_table:
cyrillic=cyrillic.replace(group[0],group[1])
print(cyrillic)
if __name__=="__main__":
while True:
main()
``` |
{
"source": "12z/simple-web-framework",
"score": 2
} |
#### File: simple-web-framework/framework/framework.py
```python
from .http_entities import Request
import os
PYCHARM_DEBUG_ENV = 'PYCHARM_DEBUG'
if PYCHARM_DEBUG_ENV in os.environ and os.environ[PYCHARM_DEBUG_ENV] == 'True':
from . import debug
debug.set_debug_environment()
route_mapping = {}
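# WSGI entry point: parse the environ into a Request, dispatch on the path via
# route_mapping, and send the handler's response back through start_response.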
def application(environ, start_response):
verb = environ['REQUEST_METHOD']
path = environ['PATH_INFO']
parameters = get_parameters(environ['QUERY_STRING'])
headers = get_headers(environ)
content_length = int(environ.get('CONTENT_LENGTH') or 0)  # tolerate a missing or empty CONTENT_LENGTH
body = environ['wsgi.input'].read(content_length)
request = Request(verb, path, parameters, headers, body)
response = route_mapping[path](request)
prepared_headers = list(response.headers.items())
start_response(response.status, prepared_headers)
return prepare_body(response.body)
def get_parameters(query_string):
params_dict = {}
for pair in query_string.split('&'):
if not pair:
continue
key, value = pair.split('=', 1)  # split on the first '=' only, so values may contain '='
params_dict[key] = value
return params_dict
def get_headers(environ):
headers = {k[5:].lower(): v for k, v in environ.items() if k.startswith('HTTP_')}
headers['content_type'] = get_content_type(environ)
headers['content_length'] = get_content_length(environ)
return headers
def get_content_length(environ):
content_length_key = 'CONTENT_LENGTH'
return get_standard_header(content_length_key, environ)
def get_content_type(environ):
content_type_key = 'CONTENT_TYPE'
return get_standard_header(content_type_key, environ)
def get_standard_header(header_key, environ):
if header_key not in environ:
return None
return environ[header_key]
def prepare_body(body):
if isinstance(body, str):
return [body.encode()]
if isinstance(body, bytes):
return [body]  # WSGI expects an iterable of byte strings, not raw bytes
``` |
{
"source": "1301476057/maltrail",
"score": 3
} |
#### File: maltrail/cccheck/read_data.py
```python
import pandas as pd
import numpy as np
import warnings
import Sentiment_RNN_Solution
warnings.filterwarnings('ignore')
def get_ip_24(ip):
"""
Return the first 24 bits (the first three octets) of an IP address from the dataset.
:param ip:
:return:
"""
tmp = ip.split('.')
if len(tmp) < 4:
return np.nan
else:
ip_24 = tmp[0] + '.' + tmp[1] + '.' + tmp[2]
return ip_24
def read(path):
data = pd.read_csv(path)
data = data.fillna(" ")
data.loc[(data['query_parameter'] == "0") | (data['query_parameter'] == " "), 'query_parameter'] = "None=str;1"  # .loc avoids pandas chained-assignment pitfalls
data['ip_24'] = data['ip_dst'].apply(lambda x: get_ip_24(x))
data.dropna(inplace=True)
return data
def get_data(data):
data = data.fillna(" ")
data.loc[(data['query_parameter'] == "0") | (data['query_parameter'] == " "), 'query_parameter'] = "None=str;1"
data['ip_24'] = data['ip_dst'].apply(lambda x: get_ip_24(x))
data.dropna(inplace=True)
return data
```
#### File: maltrail/core/addr.py
```python
import re
import codecs  # stdlib module used by codecs.encode() below; the original "from PIL.features import codecs" appears to be an auto-import slip
from numpy import long
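# Dotted-quad IPv4 <-> 32-bit integer conversions.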
def addr_to_int(value):
_ = value.split('.')
return (long(_[0]) << 24) + (long(_[1]) << 16) + (long(_[2]) << 8) + long(_[3])
def int_to_addr(value):
return '.'.join(str(value >> n & 0xff) for n in (24, 16, 8, 0))
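# make_mask(bits) builds a CIDR netmask; operator precedence makes this
# 0xffffffff ^ ((1 << (32 - bits)) - 1), e.g. make_mask(24) == 0xffffff00.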
def make_mask(bits):
return 0xffffffff ^ (1 << 32 - bits) - 1
def compress_ipv6(address):
zeros = re.findall("(?:0000:)+", address)
if zeros:
address = address.replace(sorted(zeros, key=lambda _: len(_))[-1], ":", 1)
address = re.sub(r"(\A|:)0+(\w)", "\g<1>\g<2>", address)
if address == ":1":
address = "::1"
return address
# Note: socket.inet_ntop not available everywhere (Reference: https://docs.python.org/2/library/socket.html#socket.inet_ntop)
def inet_ntoa6(packed_ip):
try:
if type(packed_ip) == bytes:
packed_ip = packed_ip.decode("utf-8", "ignore")
if type(packed_ip) == dict:
print("!!!!!!!!!!!!!!~~dict~~packed_ip~~ipv6~~", packed_ip)
_ = codecs.encode(packed_ip, "hex")
return compress_ipv6(':'.join(_[i:i + 4] for i in range(0, len(_), 4)))
except Exception:
print(type(packed_ip), packed_ip)
``` |
{
"source": "13015517713/ClockIn",
"score": 3
} |
#### File: 13015517713/ClockIn/Logger.py
```python
import logging
def createLogger(name):
with open("./log.txt","w") as w: # 为了清空内容
pass
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)s %(asctime)s [%(filename)s:%(lineno)d] %(message)s',
datefmt='%Y.%m.%d. %H:%M:%S',
filename="./log.txt")
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG) # messages below this level are ignored
return logger
``` |
{
"source": "13022108937/homework",
"score": 3
} |
#### File: JiaLu/learn/list_training1.py
```python
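# Print the first n rows of Pascal's triangle (n is read from stdin).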
def test(n):
N = [1]
count = 0
while count < n:
print(N)
N.append(0)
N = [N[i-1] + N[i] for i in range(len(N))]
count += 1
count = int(input(">>> "))
test(count)
```
#### File: JiaLu/learn/list_training9.py
```python
def bubble_search_func(data_list):
cnt_num_all = len(data_list)
for i in range(cnt_num_all - 1):
for j in range(1, cnt_num_all - i):
if data_list[j-1] > data_list[j]:
data_list[j-1], data_list[j] = data_list[j], data_list[j-1]
data_list = [54, 25, 93, 17, 77, 31, 44, 55, 20, 10]
bubble_search_func(data_list)
print(data_list)
```
#### File: P17043-class-leader/self_small_trainning/20181123.py
```python
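# Three ways to compute 1! + 2! + ... + 5!: an iterative factorial helper,
# a running product, and a recursive factorial.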
def cross(n):
result = 1
while n > 1:
result *= n
n -= 1
return result
sum = 0
for i in range(1,6):
sum += cross(i)
print(sum)
sum = 0
a = 1
for i in range(1, 6):
a *= i
sum += a
print(sum)
print(cross(6))
def cross(n):
if n == 1:
return 1
if n >= 1:
return n * cross(n-1)
sum = 0
for i in range(1,6):
sum += cross(i)
print(sum)
```
#### File: P17043-class-leader/self_small_trainning/20181128.py
```python
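# Print each octet of an IPv4 address as zero-padded 8-bit binary.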
def add_zeo(b_s,n):
if len(b_s) == n:
for _ in range(8-n):
b_s += '0'
return b_s
s = '10.3.9.12'
l_s = s.split('.')
print(l_s)
for x in l_s:
tmp = int(x)
b_s = ''
while True:
if tmp == 0:
break
b_s += str(tmp % 2)
tmp //= 2
for n in range(1,8):
b_s = add_zeo(b_s, n)
print('{:<2} {:>8}'.format(x,b_s[::-1]))
# 10//2 = 5 0
# 5//2 = 2 1
# 2//2 = 1 0
# 1//2 = 0 1
```
#### File: homework/P17048-jiege/homework1.py
```python
import json
from pathlib import Path
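# Minimal JSON-backed address book: each handler below mutates the in-memory
# users dict, and exit() persists it back to users.json.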
def add(data):
name = input("Please enter new name:")
age = input("Please enter new age:")
tel = input("Please enter nwe tel:")
if name not in data.keys():
data[name] = {"age":age,"tel":tel}
return data
def delete(data):
name = input("Please enter delete username:")
error = "The name is not exist"
prompt = data.pop(name,error)
print(prompt)
return data
def update(data):
names = input("Please enter string format:{name:age:tel}:")
if len(names.split(":")) == 3:
name,age,tel = names.split(":")
if name in data.keys():
data[name] = {"age": age, "tel": tel}
print("The user update successfully! ")
print("name:{}\nage:{}\ntel:{}".format(name, data[name].get('age'), data[name].get('tel')))
else:
print("name is not exit")
else:
print("The format error.")
return data
def find(data):
name = input("Please enter find name:")
if name in data.keys():
print("name:{}\nage:{}\ntel:{}".format(name,data[name].get('age'),data[name].get('tel')))
else:
print("name is not exit")
def list(data):
print("name\tage\ttel")
for k,v in data.items():
print("{}\t{}\t{}".format(k,str(v['age']),str(v['tel'])))
def exit(data,file):
with open(file,'w') as f:
json.dump(data,f)
print("The data saved successfully! exit...")
def main():
filename = "users.json"
path = Path(filename)
if path.is_file():
with open(filename) as f:
users = json.load(f)
if not users:
add(users)
else:
users = {}
add(users)
while True:
value = input("Please choose (add|update|find|list|delete|exit):")
if value == "delete":
delete(users)
elif value == "add":
add(users)
elif value == "update":
update(users)
elif value == "find":
find(users)
elif value == "list":
list(users)
elif value == "exit":
exit(users, filename)
break
else:
print("Sorry,Enter error...again check")
value = input("Please choose (add|update|find|list|delete|exit):")
if __name__ == "__main__":
main()
``` |
{
"source": "1302580MK/Udemy_Python",
"score": 4
} |
#### File: Udemy_Python/Lection2/classes.py
```python
class Vehicle:
speed = 0
# ctor
#def __new__(cls):
# return object.__new__(cls)
def __init__(self, speed = 0):
self.speed = speed
print(f"Init speed with {self.speed}")
def IncreaseSpeed(self, increaseAmount):
self.speed += increaseAmount
def __del__(self):
print("Object has been destroyed")
car1 = Vehicle()
car2 = Vehicle()
car1.speed = 10
print(car1.speed)
print(car2.speed)
car2.IncreaseSpeed(13)
print(f"blub: {car2.speed}")
# destroy an object
del car2
print("test")
```
#### File: Udemy_Python/OtherThings/functions.py
```python
def AwesomeFunction():
"This function does stuff"
print("Marc ist toll")
return
AwesomeFunction()
def Funtion2(number1, number2):
"adds numbers"
return number1 + number2
print(Funtion2(2,5))
var1 =5
print(var1)
def ChangeFunction(number1):
var1 = 8 # local variable; does not touch the outer var1
return var1
print(ChangeFunction(var1))
print(var1)
def DefaultArg(var1=10):
return var1 * 2
print(var1)
print(DefaultArg())
print(DefaultArg(2))
``` |