code (string, lengths 13 to 1.2M) | order_type (string, 1 distinct value) | original_example (dict) | step_ids (list, lengths 1 to 5)
---|---|---|---
# hi :)
import numpy as np
import random
from copy import deepcopy
# initialization....
# see also prepare.sh
header = np.loadtxt("header.txt", dtype=int)
TIME = header[2]
CARS = header[3]
STARTPOINT = header[4]
GRAPH = np.loadtxt("links.txt",dtype=int)
number_of_links = GRAPH.shape[0]
N = len(GRAPH[:,1])
VOIS=[]
TPS=[]
DIST=[]
AWARD=[]
for i in range(N):
VOIS.append([])
TPS.append([])
DIST.append([])
for i in range(N):
VOIS[GRAPH[i,0]].append(GRAPH[i,1])
TPS[GRAPH[i,0]].append(GRAPH[i,3])
DIST[GRAPH[i,0]].append(GRAPH[i,4])
if GRAPH[i,2] == 2:
VOIS[GRAPH[i,1]].append(GRAPH[i,0])
TPS[GRAPH[i,1]].append(GRAPH[i,3])
DIST[GRAPH[i,1]].append(GRAPH[i,4])
# VOIS[2803] = [1231, 123,123]
# TPS[2803] = [10s, 20s, 30s]
# DIST[2803] = [10m, 200m, 300m]
# the main code
def best_neighbour(current_node, current_cost):
    # pick, among the still-affordable neighbours, one with the highest remaining award
    neighbours = VOIS[current_node]
    # filter out neighbours that are too costly to reach within the time budget
    good_neighbours_indexes = []
    for n in range(len(neighbours)):
        if current_cost + TPS[current_node][n] <= TIME:
            good_neighbours_indexes.append(n)
    if len(good_neighbours_indexes) > 0:
        awards = [DIST[current_node][ind]
                  for ind in good_neighbours_indexes]
        maward = max(awards)
        indexes = [ind for ind in good_neighbours_indexes
                   if DIST[current_node][ind] == maward]
        # break ties between equally rewarding neighbours at random
        best_neighbour_index = random.choice(indexes)
        cost = TPS[current_node][best_neighbour_index]
        best_neighbour = neighbours[best_neighbour_index]
    else:
        # no reachable neighbour left within the time budget
        cost = -100
        best_neighbour = -100
    return (best_neighbour, cost)
def remove_award(current_node, next_node):
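    # mark the edge as visited by zeroing its award, in both directions for two-way links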
next_node_index = VOIS[current_node].index(next_node)
# the distance will be zero
DIST[current_node][next_node_index] = 0
if current_node in VOIS[next_node]:
current_node_index = VOIS[next_node].index(current_node)
DIST[next_node][current_node_index] = 0
print CARS
# car by car
for CAR in range(CARS):
visited_nodes = []
current_node = STARTPOINT
current_time = 0
visited_nodes.append(current_node)
while current_time < TIME:
# choose a neighbour
next_node, time = best_neighbour(current_node, current_time)
if next_node == -100:
break
else:
            # we were here, so remove the award
remove_award(current_node, next_node)
visited_nodes.append(next_node)
current_node = next_node
current_time = current_time + time
# output for that CAR
# print len(visited_nodes)
print len(visited_nodes)
for n in visited_nodes:
print n
|
normal
|
{
"blob_id": "9a9fdf0f3cfb876a384059f3dcf2508f960168c2",
"index": 2167,
"step-1": "# hi :)\nimport numpy as np\nimport random\nfrom copy import deepcopy\n\n\n# initialization....\n# see also prepare.sh\n\nheader = np.loadtxt(\"header.txt\", dtype=int)\nTIME = header[2]\nCARS = header[3]\nSTARTPOINT = header[4]\n\nGRAPH = np.loadtxt(\"links.txt\",dtype=int)\nnumber_of_links = GRAPH.shape[0]\nN = len(GRAPH[:,1])\n\nVOIS=[]\nTPS=[]\nDIST=[]\nAWARD=[]\nfor i in range(N):\n\tVOIS.append([])\n\tTPS.append([])\n\tDIST.append([])\n\nfor i in range(N):\n\tVOIS[GRAPH[i,0]].append(GRAPH[i,1])\n\tTPS[GRAPH[i,0]].append(GRAPH[i,3])\n\tDIST[GRAPH[i,0]].append(GRAPH[i,4])\n\tif GRAPH[i,2] == 2:\n\t\tVOIS[GRAPH[i,1]].append(GRAPH[i,0])\n\t\tTPS[GRAPH[i,1]].append(GRAPH[i,3])\n\t\tDIST[GRAPH[i,1]].append(GRAPH[i,4])\n\n# VOIS[2803] = [1231, 123,123]\n# TPS[2803] = [10s, 20s, 30s]\n# DIST[2803] = [10m, 200m, 300m]\n\n# the main code\n\ndef best_neighbour(current_node, current_cost):\n # fix \n neighbours = VOIS[current_node]\n # filter very costly\n good_neighbours_indexes = []\n for n in range(len(neighbours)):\n if current_cost + TPS[current_node][n] <= TIME:\n good_neighbours_indexes.append(n)\n\n\n if len(good_neighbours_indexes) > 0:\n for n in good_neighbours_indexes:\n possible_next_node = VOIS[current_node][n]\n possible_cost = TPS[current_node][n]\n bn = best_neighbour(possible_next_node, \n current_cost + possible_next_node)\n\n# awards = [DIST[current_node][ind] \n# for ind in good_neighbours_indexes]\n# maward = max(awards)\n# indexes = [ind for ind in good_neighbours_indexes\n# if DIST[current_node][ind] == maward]\n\n \n \n best_neighbour_index = random.choice(indexes)\n cost = TPS[current_node][best_neighbour_index]\n best_neighbour = neighbours[best_neighbour_index]\n else:\n # error\n cost = -100\n best_neighbour = -100\n return (best_neighbour, cost)\n\ndef remove_award(current_node, next_node):\n next_node_index = VOIS[current_node].index(next_node)\n # the distance will be zero \n DIST[current_node][next_node_index] = 0\n if current_node in VOIS[next_node]:\n current_node_index = VOIS[next_node].index(current_node)\n DIST[next_node][current_node_index] = 0\n\nprint CARS\n# CAR par CAR\nfor CAR in range(CARS):\n visited_nodes = [] \n current_node = STARTPOINT\n current_time = 0\n visited_nodes.append(current_node)\n while current_time < TIME:\n # choose a neighbour\n next_node, time = best_neighbour(current_node, current_time)\n if next_node == -100:\n break\n else:\n # we was here, so we remove award\n remove_award(current_node, next_node)\n visited_nodes.append(next_node)\n current_node = next_node\n current_time = current_time + time\n # output for that CAR\n # print len(visited_nodes)\n print len(visited_nodes)\n for n in visited_nodes:\n print n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from sys import stdin
def IsPrime(x):
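    # trial division up to sqrt(x); note that values below 2 are reported as prime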
for i in range(2, int(x ** 0.5) + 1):
if not x % i:
return False
return True
for x in stdin:
x = x[:-1]
y = x[::-1]
a = IsPrime(int(x))
b = IsPrime(int(y))
if not a:
print("%s is not prime." %x)
elif (a and not b) or (a and x == y):
print("%s is prime." %x)
else:
print("%s is emirp." %x)
|
normal
|
{
"blob_id": "fcfec521e071aa586febc74efb2deb0e9d0a331e",
"index": 3358,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef IsPrime(x):\n for i in range(2, int(x ** 0.5) + 1):\n if not x % i:\n return False\n return True\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef IsPrime(x):\n for i in range(2, int(x ** 0.5) + 1):\n if not x % i:\n return False\n return True\n\n\nfor x in stdin:\n x = x[:-1]\n y = x[::-1]\n a = IsPrime(int(x))\n b = IsPrime(int(y))\n if not a:\n print('%s is not prime.' % x)\n elif a and not b or a and x == y:\n print('%s is prime.' % x)\n else:\n print('%s is emirp.' % x)\n",
"step-4": "from sys import stdin\n\n\ndef IsPrime(x):\n for i in range(2, int(x ** 0.5) + 1):\n if not x % i:\n return False\n return True\n\n\nfor x in stdin:\n x = x[:-1]\n y = x[::-1]\n a = IsPrime(int(x))\n b = IsPrime(int(y))\n if not a:\n print('%s is not prime.' % x)\n elif a and not b or a and x == y:\n print('%s is prime.' % x)\n else:\n print('%s is emirp.' % x)\n",
"step-5": "from sys import stdin\n\ndef IsPrime(x):\n for i in range(2, int(x ** 0.5) + 1):\n if not x % i:\n return False\n \n return True\n\nfor x in stdin:\n x = x[:-1]\n y = x[::-1]\n a = IsPrime(int(x))\n b = IsPrime(int(y))\n if not a:\n print(\"%s is not prime.\" %x)\n elif (a and not b) or (a and x == y):\n print(\"%s is prime.\" %x)\n else:\n print(\"%s is emirp.\" %x)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from pirates.teleport.AreaTeleportActor import AreaTeleportActor
class DoorTeleportActor(AreaTeleportActor):
pass
|
normal
|
{
"blob_id": "b679444fde7cd8eb819443922f37ee54c0f29de4",
"index": 424,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass DoorTeleportActor(AreaTeleportActor):\n pass\n",
"step-3": "from pirates.teleport.AreaTeleportActor import AreaTeleportActor\n\n\nclass DoorTeleportActor(AreaTeleportActor):\n pass\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#-*- coding:UTF-8 -*-
year = int(input('请输入一个年份:'))
"""
if(year % 4) == 0:
if(year % 100) == 0:
if(year % 400) == 0:
print('{0}是润年'.format(year))
else:
print('{0}不是润年'.format(year))
else:
print('{0}是润年'.format(year))
else:
print('{0}不是润年'.format(year))
"""
if(year%4)==0 and (year%100)!=0 or (year%400)==0:
print('{0}是润年'.format(year))
else:
print('{0}不是润年'.format(year))
|
normal
|
{
"blob_id": "78178ec8474a3deb876ab7d3950cd427d7a795d5",
"index": 2218,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif year % 4 == 0 and year % 100 != 0 or year % 400 == 0:\n print('{0}是润年'.format(year))\nelse:\n print('{0}不是润年'.format(year))\n",
"step-3": "year = int(input('请输入一个年份:'))\n<mask token>\nif year % 4 == 0 and year % 100 != 0 or year % 400 == 0:\n print('{0}是润年'.format(year))\nelse:\n print('{0}不是润年'.format(year))\n",
"step-4": "#-*- coding:UTF-8 -*- \n\nyear = int(input('请输入一个年份:'))\n\"\"\"\nif(year % 4) == 0:\n if(year % 100) == 0:\n if(year % 400) == 0:\n print('{0}是润年'.format(year))\n else:\n print('{0}不是润年'.format(year))\n else:\n print('{0}是润年'.format(year))\nelse:\n print('{0}不是润年'.format(year)) \n\n\"\"\"\nif(year%4)==0 and (year%100)!=0 or (year%400)==0:\n print('{0}是润年'.format(year))\nelse:\n print('{0}不是润年'.format(year)) \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.urls import path
from redjit.post.views import MyPost, PostView
urlpatterns = [
    path('newpost/', MyPost.as_view(), name='newpost'),
path('subredjit/<subredjit>/<post_id>/', PostView.as_view(), name='post')
]
|
normal
|
{
"blob_id": "e0fc7e5771f6cb8e0638bc8c9549cfe1a92d3d82",
"index": 8719,
"step-1": "from django.urls import path\nfrom redjit.post.views import MyPost, PostView\n\n\n\nurlpatterns = [\n path('newpost/', MyPost.as_view(), name='newpost')\n path('subredjit/<subredjit>/<post_id>/', PostView.as_view(), name='post')\n]",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# For better usage on ddp
import torch
from pytorch_lightning.metrics import Metric
import cv2
import numpy as np
import skimage
import torch.tensor as Tensor
class SegMetric(Metric):
def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):
super().__init__(dist_sync_on_step=dist_sync_on_step)
# call `self.add_state`for every internal state that is needed for the metrics computations
# dist_reduce_fx indicates the function that should be used to reduce
# state from multiple processes
self.iou_thr = iou_thr
self.prob_thr = prob_thr
self.img_size = img_size
self.use_ddp = dist_sync_on_step
self.add_state("csv_files", default=[], dist_reduce_fx="cat")
def update(self, preds: torch.Tensor, target: torch.Tensor):
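        # accumulate region-level GT / prediction / TP / FP counts (and positive / negative image counts) per source csv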
logit_seg, _ = preds
_, mask, mask_cls, _, img_path, _ = target
assert logit_seg.shape == mask.shape
pred_seg = torch.sigmoid(logit_seg).detach().cpu().numpy()
gt_seg = mask.detach().cpu().numpy()
gt_cls = mask_cls.detach().cpu().numpy()[:, 0].tolist()
pred_seg = pred_seg.astype("float32")
for idx, file_path in enumerate(img_path):
pred = cv2.resize(pred_seg[idx][0], (self.img_size, self.img_size))
pred = np.expand_dims(pred, 0)
gt = cv2.resize(
gt_seg[idx][0],
(self.img_size, self.img_size),
interpolation=cv2.INTER_NEAREST,
)
gt = np.expand_dims(gt, 0)
gt_c = gt_cls[idx]
is_p = int(gt_c == 1.0)
is_n = 1 - is_p
gt_nums_, pred_nums_, tp_nums_, fp_nums_ = evaluation(
pred, gt, iou_th=self.iou_thr, prob_ths=[self.prob_thr]
)
# csv = file_path.split("/")[5]
csv = file_path.split("png_1024/")[1].split("/")[0]
if not hasattr(self, f"{csv}_gt"):
self.csv_files += [csv]
self.add_state(f"{csv}_gt", default=Tensor(0), dist_reduce_fx="sum")
self.add_state(f"{csv}_pred", default=Tensor(0), dist_reduce_fx="sum")
self.add_state(f"{csv}_tp", default=Tensor(0), dist_reduce_fx="sum")
self.add_state(f"{csv}_fp", default=Tensor(0), dist_reduce_fx="sum")
self.add_state(f"{csv}_pos", default=Tensor(0), dist_reduce_fx="sum")
self.add_state(
f"{csv}_neg", default=torch.tensor(0), dist_reduce_fx="sum"
)
            # TODO: needs to be changed if num_class > 1
            # FIXME: awkward format..
setattr(self, f"{csv}_gt", getattr(self, f"{csv}_gt") + gt_nums_[0])
setattr(
self, f"{csv}_pred", getattr(self, f"{csv}_pred") + pred_nums_[0, 0]
)
setattr(self, f"{csv}_tp", getattr(self, f"{csv}_tp") + tp_nums_[0, 0])
setattr(self, f"{csv}_fp", getattr(self, f"{csv}_fp") + fp_nums_[0, 0])
setattr(self, f"{csv}_pos", getattr(self, f"{csv}_pos") + is_p)
setattr(self, f"{csv}_neg", getattr(self, f"{csv}_neg") + is_n)
def update_each(self, preds: torch.Tensor, target: torch.Tensor):
self.update(preds, target)
def compute(self):
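        # aggregate the per-csv counters into overall precision, recall, and F1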
gt = 0
tp = 0
fp = 0
pos = 0
neg = 0
for csv in self.csv_files:
gt += getattr(self, f"{csv}_gt").item()
tp += getattr(self, f"{csv}_tp").item()
fp += getattr(self, f"{csv}_fp").item()
pos += getattr(self, f"{csv}_pos").item()
neg += getattr(self, f"{csv}_neg").item()
pre = tp / (tp + fp * (pos / (neg + 1e-5)) + 1e-5)
rec = tp / (gt + 1e-5)
f1 = 2 * (pre * rec) / (pre + rec + 1e-5)
myf1 = (pre + rec) / 2.0
lesion_metric_dict = {
"pre": pre,
"rec": rec,
"f1": f1,
"myf1": myf1,
}
# FIXME: DDP Error: https://github.com/PyTorchLightning/pytorch-lightning/discussions/2529
# Tensors must be CUDA and dense
# if self.use_ddp:
# lesion_metric_dict = torch.FloatTensor([myf1], device=self.device)
return lesion_metric_dict
def compute_each(self):
metric_dict_each_csv = {}
for csv in self.csv_files:
gt = getattr(self, f"{csv}_gt").item()
tp = getattr(self, f"{csv}_tp").item()
fp = getattr(self, f"{csv}_fp").item()
pos = getattr(self, f"{csv}_pos").item()
neg = getattr(self, f"{csv}_neg").item()
pre = tp / (tp + fp * (pos / (neg + 1e-5)) + 1e-5)
rec = tp / (gt + 1e-5)
f1 = 2 * (pre * rec) / (pre + rec + 1e-5)
fppi = fp / (pos + neg + 1e-5)
# myf1 = (pre + rec) / 2.0
lesion_metric_dict = {
"gt": gt,
"pos": pos,
"neg": neg,
"pre": pre,
"rec": rec,
"f1": f1,
"fppi": fppi
# "myf1": myf1,
}
metric_dict_each_csv[csv] = lesion_metric_dict
return metric_dict_each_csv
# Helper functions
def calc_iou(bbox_a, bbox_b):
"""
:param a: bbox list [min_y, min_x, max_y, max_x]
:param b: bbox list [min_y, min_x, max_y, max_x]
:return:
"""
size_a = (bbox_a[2] - bbox_a[0]) * (bbox_a[3] - bbox_a[1])
size_b = (bbox_b[2] - bbox_b[0]) * (bbox_b[3] - bbox_b[1])
min_ab_y = max(bbox_a[0], bbox_b[0])
min_ab_x = max(bbox_a[1], bbox_b[1])
max_ab_y = min(bbox_a[2], bbox_b[2])
max_ab_x = min(bbox_a[3], bbox_b[3])
inter_ab = max(0, max_ab_y - min_ab_y) * max(0, max_ab_x - min_ab_x)
return inter_ab / (size_a + size_b - inter_ab)
def evaluation(pred, gt, iou_th=0.15, prob_ths=[0.5]):
"""
:param pred: Prediction Seg Map, shape = (1, num_classes, height, width)
:param gt: Ground-truth Seg Map, shape = (1, num_classes, height, width)
:param iou_th: Threshold for prediction and gt matching
:return:
gt_nums: Ground-truth region numbers
pred_nums: Prediction region numbers
tp_nums: True Positive region numbers
fp_nums: False Positive region numbers
    # Required assumption: batch_size=1 (the regionprops function only works on 2-D arrays)
    # Regions are excluded from consideration when (based on a 2048x2048 image, pixel spacing=0.2mm):
    # i) the region bbox area is < 400 pixels
    # ii) (currently unused) the region bbox major axis is < 4mm (20 pixels), minor axis < 2mm (10 pixels)
    # issue: # 3. The image size can vary with the detector size; handling this exactly would require pixel spacing info.
    # #    Therefore, changing the criteria with image size does not seem necessary at this stage.
"""
if len(pred.shape) > 3:
pred = pred[0]
gt = gt[0]
num_classes = pred.shape[0]
image_size = gt.shape[2]
gt_regions = [
skimage.measure.regionprops(skimage.measure.label(gt[c, :, :]))
for c in range(num_classes)
]
for c in range(num_classes):
gt_regions[c] = [
r for r in gt_regions[c] if r.area > (20 * (image_size / 2048)) ** 2
]
pred_regions = [
[
skimage.measure.regionprops(skimage.measure.label(pred[c, :, :] > th))
for c in range(num_classes)
]
for th in prob_ths
] # shape - len(prob_th), num_classes
    # initialization
gt_nums = np.array([len(gt_regions[c]) for c in range(num_classes)])
pred_nums = np.array(
[
[len(pred_regions[thi][c]) for c in range(num_classes)]
for thi in range(len(prob_ths))
]
)
tp_nums = np.zeros((len(prob_ths), num_classes))
    fp_nums = pred_nums.copy()  # without .copy(), fp_nums and pred_nums would share the same array
# Gt-Pred Bbox Iou Matrix
for c in range(num_classes):
for thi in range(len(prob_ths)):
            if (gt_nums[c] == 0) or (pred_nums[thi][c] == 0):  # np array quirk;
continue
iou_matrix = np.zeros((gt_nums[c], pred_nums[thi][c]))
for gi, gr in enumerate(gt_regions[c]):
for pi, pr in enumerate(pred_regions[thi][c]):
iou_matrix[gi, pi] = calc_iou(gr.bbox, pr.bbox)
tp_nums[thi][c] = np.sum(np.any((iou_matrix >= iou_th), axis=1))
fp_nums[thi][c] -= np.sum(np.any((iou_matrix > iou_th), axis=0))
return gt_nums, pred_nums, tp_nums, fp_nums
|
normal
|
{
"blob_id": "8d3f8872a3d5c4351551dc2d46839763d28ebd70",
"index": 3586,
"step-1": "<mask token>\n\n\nclass SegMetric(Metric):\n\n def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):\n super().__init__(dist_sync_on_step=dist_sync_on_step)\n self.iou_thr = iou_thr\n self.prob_thr = prob_thr\n self.img_size = img_size\n self.use_ddp = dist_sync_on_step\n self.add_state('csv_files', default=[], dist_reduce_fx='cat')\n <mask token>\n\n def update_each(self, preds: torch.Tensor, target: torch.Tensor):\n self.update(preds, target)\n\n def compute(self):\n gt = 0\n tp = 0\n fp = 0\n pos = 0\n neg = 0\n for csv in self.csv_files:\n gt += getattr(self, f'{csv}_gt').item()\n tp += getattr(self, f'{csv}_tp').item()\n fp += getattr(self, f'{csv}_fp').item()\n pos += getattr(self, f'{csv}_pos').item()\n neg += getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n myf1 = (pre + rec) / 2.0\n lesion_metric_dict = {'pre': pre, 'rec': rec, 'f1': f1, 'myf1': myf1}\n return lesion_metric_dict\n\n def compute_each(self):\n metric_dict_each_csv = {}\n for csv in self.csv_files:\n gt = getattr(self, f'{csv}_gt').item()\n tp = getattr(self, f'{csv}_tp').item()\n fp = getattr(self, f'{csv}_fp').item()\n pos = getattr(self, f'{csv}_pos').item()\n neg = getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n fppi = fp / (pos + neg + 1e-05)\n lesion_metric_dict = {'gt': gt, 'pos': pos, 'neg': neg, 'pre':\n pre, 'rec': rec, 'f1': f1, 'fppi': fppi}\n metric_dict_each_csv[csv] = lesion_metric_dict\n return metric_dict_each_csv\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass SegMetric(Metric):\n\n def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):\n super().__init__(dist_sync_on_step=dist_sync_on_step)\n self.iou_thr = iou_thr\n self.prob_thr = prob_thr\n self.img_size = img_size\n self.use_ddp = dist_sync_on_step\n self.add_state('csv_files', default=[], dist_reduce_fx='cat')\n\n def update(self, preds: torch.Tensor, target: torch.Tensor):\n logit_seg, _ = preds\n _, mask, mask_cls, _, img_path, _ = target\n assert logit_seg.shape == mask.shape\n pred_seg = torch.sigmoid(logit_seg).detach().cpu().numpy()\n gt_seg = mask.detach().cpu().numpy()\n gt_cls = mask_cls.detach().cpu().numpy()[:, 0].tolist()\n pred_seg = pred_seg.astype('float32')\n for idx, file_path in enumerate(img_path):\n pred = cv2.resize(pred_seg[idx][0], (self.img_size, self.img_size))\n pred = np.expand_dims(pred, 0)\n gt = cv2.resize(gt_seg[idx][0], (self.img_size, self.img_size),\n interpolation=cv2.INTER_NEAREST)\n gt = np.expand_dims(gt, 0)\n gt_c = gt_cls[idx]\n is_p = int(gt_c == 1.0)\n is_n = 1 - is_p\n gt_nums_, pred_nums_, tp_nums_, fp_nums_ = evaluation(pred, gt,\n iou_th=self.iou_thr, prob_ths=[self.prob_thr])\n csv = file_path.split('png_1024/')[1].split('/')[0]\n if not hasattr(self, f'{csv}_gt'):\n self.csv_files += [csv]\n self.add_state(f'{csv}_gt', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pred', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_tp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_fp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pos', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_neg', default=torch.tensor(0),\n dist_reduce_fx='sum')\n setattr(self, f'{csv}_gt', getattr(self, f'{csv}_gt') + gt_nums_[0]\n )\n setattr(self, f'{csv}_pred', getattr(self, f'{csv}_pred') +\n pred_nums_[0, 0])\n setattr(self, f'{csv}_tp', getattr(self, f'{csv}_tp') +\n tp_nums_[0, 0])\n setattr(self, f'{csv}_fp', getattr(self, f'{csv}_fp') +\n fp_nums_[0, 0])\n setattr(self, f'{csv}_pos', getattr(self, f'{csv}_pos') + is_p)\n setattr(self, f'{csv}_neg', getattr(self, f'{csv}_neg') + is_n)\n\n def update_each(self, preds: torch.Tensor, target: torch.Tensor):\n self.update(preds, target)\n\n def compute(self):\n gt = 0\n tp = 0\n fp = 0\n pos = 0\n neg = 0\n for csv in self.csv_files:\n gt += getattr(self, f'{csv}_gt').item()\n tp += getattr(self, f'{csv}_tp').item()\n fp += getattr(self, f'{csv}_fp').item()\n pos += getattr(self, f'{csv}_pos').item()\n neg += getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n myf1 = (pre + rec) / 2.0\n lesion_metric_dict = {'pre': pre, 'rec': rec, 'f1': f1, 'myf1': myf1}\n return lesion_metric_dict\n\n def compute_each(self):\n metric_dict_each_csv = {}\n for csv in self.csv_files:\n gt = getattr(self, f'{csv}_gt').item()\n tp = getattr(self, f'{csv}_tp').item()\n fp = getattr(self, f'{csv}_fp').item()\n pos = getattr(self, f'{csv}_pos').item()\n neg = getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n fppi = fp / (pos + neg + 1e-05)\n lesion_metric_dict = {'gt': gt, 'pos': pos, 'neg': neg, 'pre':\n pre, 'rec': rec, 'f1': f1, 'fppi': fppi}\n metric_dict_each_csv[csv] = lesion_metric_dict\n return metric_dict_each_csv\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass SegMetric(Metric):\n\n def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):\n super().__init__(dist_sync_on_step=dist_sync_on_step)\n self.iou_thr = iou_thr\n self.prob_thr = prob_thr\n self.img_size = img_size\n self.use_ddp = dist_sync_on_step\n self.add_state('csv_files', default=[], dist_reduce_fx='cat')\n\n def update(self, preds: torch.Tensor, target: torch.Tensor):\n logit_seg, _ = preds\n _, mask, mask_cls, _, img_path, _ = target\n assert logit_seg.shape == mask.shape\n pred_seg = torch.sigmoid(logit_seg).detach().cpu().numpy()\n gt_seg = mask.detach().cpu().numpy()\n gt_cls = mask_cls.detach().cpu().numpy()[:, 0].tolist()\n pred_seg = pred_seg.astype('float32')\n for idx, file_path in enumerate(img_path):\n pred = cv2.resize(pred_seg[idx][0], (self.img_size, self.img_size))\n pred = np.expand_dims(pred, 0)\n gt = cv2.resize(gt_seg[idx][0], (self.img_size, self.img_size),\n interpolation=cv2.INTER_NEAREST)\n gt = np.expand_dims(gt, 0)\n gt_c = gt_cls[idx]\n is_p = int(gt_c == 1.0)\n is_n = 1 - is_p\n gt_nums_, pred_nums_, tp_nums_, fp_nums_ = evaluation(pred, gt,\n iou_th=self.iou_thr, prob_ths=[self.prob_thr])\n csv = file_path.split('png_1024/')[1].split('/')[0]\n if not hasattr(self, f'{csv}_gt'):\n self.csv_files += [csv]\n self.add_state(f'{csv}_gt', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pred', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_tp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_fp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pos', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_neg', default=torch.tensor(0),\n dist_reduce_fx='sum')\n setattr(self, f'{csv}_gt', getattr(self, f'{csv}_gt') + gt_nums_[0]\n )\n setattr(self, f'{csv}_pred', getattr(self, f'{csv}_pred') +\n pred_nums_[0, 0])\n setattr(self, f'{csv}_tp', getattr(self, f'{csv}_tp') +\n tp_nums_[0, 0])\n setattr(self, f'{csv}_fp', getattr(self, f'{csv}_fp') +\n fp_nums_[0, 0])\n setattr(self, f'{csv}_pos', getattr(self, f'{csv}_pos') + is_p)\n setattr(self, f'{csv}_neg', getattr(self, f'{csv}_neg') + is_n)\n\n def update_each(self, preds: torch.Tensor, target: torch.Tensor):\n self.update(preds, target)\n\n def compute(self):\n gt = 0\n tp = 0\n fp = 0\n pos = 0\n neg = 0\n for csv in self.csv_files:\n gt += getattr(self, f'{csv}_gt').item()\n tp += getattr(self, f'{csv}_tp').item()\n fp += getattr(self, f'{csv}_fp').item()\n pos += getattr(self, f'{csv}_pos').item()\n neg += getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n myf1 = (pre + rec) / 2.0\n lesion_metric_dict = {'pre': pre, 'rec': rec, 'f1': f1, 'myf1': myf1}\n return lesion_metric_dict\n\n def compute_each(self):\n metric_dict_each_csv = {}\n for csv in self.csv_files:\n gt = getattr(self, f'{csv}_gt').item()\n tp = getattr(self, f'{csv}_tp').item()\n fp = getattr(self, f'{csv}_fp').item()\n pos = getattr(self, f'{csv}_pos').item()\n neg = getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n fppi = fp / (pos + neg + 1e-05)\n lesion_metric_dict = {'gt': gt, 'pos': pos, 'neg': neg, 'pre':\n pre, 'rec': rec, 'f1': f1, 'fppi': fppi}\n metric_dict_each_csv[csv] = lesion_metric_dict\n return metric_dict_each_csv\n\n\ndef calc_iou(bbox_a, bbox_b):\n \"\"\"\n 
:param a: bbox list [min_y, min_x, max_y, max_x]\n :param b: bbox list [min_y, min_x, max_y, max_x]\n :return:\n \"\"\"\n size_a = (bbox_a[2] - bbox_a[0]) * (bbox_a[3] - bbox_a[1])\n size_b = (bbox_b[2] - bbox_b[0]) * (bbox_b[3] - bbox_b[1])\n min_ab_y = max(bbox_a[0], bbox_b[0])\n min_ab_x = max(bbox_a[1], bbox_b[1])\n max_ab_y = min(bbox_a[2], bbox_b[2])\n max_ab_x = min(bbox_a[3], bbox_b[3])\n inter_ab = max(0, max_ab_y - min_ab_y) * max(0, max_ab_x - min_ab_x)\n return inter_ab / (size_a + size_b - inter_ab)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass SegMetric(Metric):\n\n def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):\n super().__init__(dist_sync_on_step=dist_sync_on_step)\n self.iou_thr = iou_thr\n self.prob_thr = prob_thr\n self.img_size = img_size\n self.use_ddp = dist_sync_on_step\n self.add_state('csv_files', default=[], dist_reduce_fx='cat')\n\n def update(self, preds: torch.Tensor, target: torch.Tensor):\n logit_seg, _ = preds\n _, mask, mask_cls, _, img_path, _ = target\n assert logit_seg.shape == mask.shape\n pred_seg = torch.sigmoid(logit_seg).detach().cpu().numpy()\n gt_seg = mask.detach().cpu().numpy()\n gt_cls = mask_cls.detach().cpu().numpy()[:, 0].tolist()\n pred_seg = pred_seg.astype('float32')\n for idx, file_path in enumerate(img_path):\n pred = cv2.resize(pred_seg[idx][0], (self.img_size, self.img_size))\n pred = np.expand_dims(pred, 0)\n gt = cv2.resize(gt_seg[idx][0], (self.img_size, self.img_size),\n interpolation=cv2.INTER_NEAREST)\n gt = np.expand_dims(gt, 0)\n gt_c = gt_cls[idx]\n is_p = int(gt_c == 1.0)\n is_n = 1 - is_p\n gt_nums_, pred_nums_, tp_nums_, fp_nums_ = evaluation(pred, gt,\n iou_th=self.iou_thr, prob_ths=[self.prob_thr])\n csv = file_path.split('png_1024/')[1].split('/')[0]\n if not hasattr(self, f'{csv}_gt'):\n self.csv_files += [csv]\n self.add_state(f'{csv}_gt', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pred', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_tp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_fp', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_pos', default=Tensor(0),\n dist_reduce_fx='sum')\n self.add_state(f'{csv}_neg', default=torch.tensor(0),\n dist_reduce_fx='sum')\n setattr(self, f'{csv}_gt', getattr(self, f'{csv}_gt') + gt_nums_[0]\n )\n setattr(self, f'{csv}_pred', getattr(self, f'{csv}_pred') +\n pred_nums_[0, 0])\n setattr(self, f'{csv}_tp', getattr(self, f'{csv}_tp') +\n tp_nums_[0, 0])\n setattr(self, f'{csv}_fp', getattr(self, f'{csv}_fp') +\n fp_nums_[0, 0])\n setattr(self, f'{csv}_pos', getattr(self, f'{csv}_pos') + is_p)\n setattr(self, f'{csv}_neg', getattr(self, f'{csv}_neg') + is_n)\n\n def update_each(self, preds: torch.Tensor, target: torch.Tensor):\n self.update(preds, target)\n\n def compute(self):\n gt = 0\n tp = 0\n fp = 0\n pos = 0\n neg = 0\n for csv in self.csv_files:\n gt += getattr(self, f'{csv}_gt').item()\n tp += getattr(self, f'{csv}_tp').item()\n fp += getattr(self, f'{csv}_fp').item()\n pos += getattr(self, f'{csv}_pos').item()\n neg += getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n myf1 = (pre + rec) / 2.0\n lesion_metric_dict = {'pre': pre, 'rec': rec, 'f1': f1, 'myf1': myf1}\n return lesion_metric_dict\n\n def compute_each(self):\n metric_dict_each_csv = {}\n for csv in self.csv_files:\n gt = getattr(self, f'{csv}_gt').item()\n tp = getattr(self, f'{csv}_tp').item()\n fp = getattr(self, f'{csv}_fp').item()\n pos = getattr(self, f'{csv}_pos').item()\n neg = getattr(self, f'{csv}_neg').item()\n pre = tp / (tp + fp * (pos / (neg + 1e-05)) + 1e-05)\n rec = tp / (gt + 1e-05)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-05)\n fppi = fp / (pos + neg + 1e-05)\n lesion_metric_dict = {'gt': gt, 'pos': pos, 'neg': neg, 'pre':\n pre, 'rec': rec, 'f1': f1, 'fppi': fppi}\n metric_dict_each_csv[csv] = lesion_metric_dict\n return metric_dict_each_csv\n\n\ndef calc_iou(bbox_a, bbox_b):\n \"\"\"\n 
:param a: bbox list [min_y, min_x, max_y, max_x]\n :param b: bbox list [min_y, min_x, max_y, max_x]\n :return:\n \"\"\"\n size_a = (bbox_a[2] - bbox_a[0]) * (bbox_a[3] - bbox_a[1])\n size_b = (bbox_b[2] - bbox_b[0]) * (bbox_b[3] - bbox_b[1])\n min_ab_y = max(bbox_a[0], bbox_b[0])\n min_ab_x = max(bbox_a[1], bbox_b[1])\n max_ab_y = min(bbox_a[2], bbox_b[2])\n max_ab_x = min(bbox_a[3], bbox_b[3])\n inter_ab = max(0, max_ab_y - min_ab_y) * max(0, max_ab_x - min_ab_x)\n return inter_ab / (size_a + size_b - inter_ab)\n\n\ndef evaluation(pred, gt, iou_th=0.15, prob_ths=[0.5]):\n \"\"\"\n :param pred: Prediction Seg Map, shape = (1, num_classes, height, width)\n :param gt: Ground-truth Seg Map, shape = (1, num_classes, height, width)\n :param iou_th: Threshold for prediction and gt matching\n :return:\n gt_nums: Ground-truth region numbers\n pred_nums: Prediction region numbers\n tp_nums: True Positive region numbers\n fp_nums: False Positive region numbers\n # 필수 가정: batch_size=1 (regionprops 함수가 2차원 행렬에만 적용 가능함)\n # Region을 고려에서 제외하는 경우(2048x2048 이미지 기반, pixel spacing=0.2mm)\n # i) Region bbox 크기 < 400 pixels\n # ii) (현재 사용x) Region bbox 장축<4mm(20pixels), 단축<2mm(10 pixels)\n # issue: # 3. 영상사이즈는 디텍터 크기에 따라 달라질 수 있습니다. 완벽히 하기 위해선 pixel spacing 정보를 받아야 합니다.\n # # 따라서 영상 크기에 대해 기준이 변경되는 것은 현단계에서는 적용할 필요가 없어 보입니다.\n \"\"\"\n if len(pred.shape) > 3:\n pred = pred[0]\n gt = gt[0]\n num_classes = pred.shape[0]\n image_size = gt.shape[2]\n gt_regions = [skimage.measure.regionprops(skimage.measure.label(gt[c, :,\n :])) for c in range(num_classes)]\n for c in range(num_classes):\n gt_regions[c] = [r for r in gt_regions[c] if r.area > (20 * (\n image_size / 2048)) ** 2]\n pred_regions = [[skimage.measure.regionprops(skimage.measure.label(pred\n [c, :, :] > th)) for c in range(num_classes)] for th in prob_ths]\n gt_nums = np.array([len(gt_regions[c]) for c in range(num_classes)])\n pred_nums = np.array([[len(pred_regions[thi][c]) for c in range(\n num_classes)] for thi in range(len(prob_ths))])\n tp_nums = np.zeros((len(prob_ths), num_classes))\n fp_nums = pred_nums.copy()\n for c in range(num_classes):\n for thi in range(len(prob_ths)):\n if gt_nums[c] == 0 or pred_nums[thi][c] == 0:\n continue\n iou_matrix = np.zeros((gt_nums[c], pred_nums[thi][c]))\n for gi, gr in enumerate(gt_regions[c]):\n for pi, pr in enumerate(pred_regions[thi][c]):\n iou_matrix[gi, pi] = calc_iou(gr.bbox, pr.bbox)\n tp_nums[thi][c] = np.sum(np.any(iou_matrix >= iou_th, axis=1))\n fp_nums[thi][c] -= np.sum(np.any(iou_matrix > iou_th, axis=0))\n return gt_nums, pred_nums, tp_nums, fp_nums\n",
"step-5": "# For better usage on ddp\n\nimport torch\nfrom pytorch_lightning.metrics import Metric\nimport cv2\nimport numpy as np\nimport skimage\nimport torch.tensor as Tensor\n\n\nclass SegMetric(Metric):\n def __init__(self, iou_thr, prob_thr, img_size, dist_sync_on_step=False):\n super().__init__(dist_sync_on_step=dist_sync_on_step)\n # call `self.add_state`for every internal state that is needed for the metrics computations\n # dist_reduce_fx indicates the function that should be used to reduce\n # state from multiple processes\n self.iou_thr = iou_thr\n self.prob_thr = prob_thr\n self.img_size = img_size\n self.use_ddp = dist_sync_on_step\n self.add_state(\"csv_files\", default=[], dist_reduce_fx=\"cat\")\n\n def update(self, preds: torch.Tensor, target: torch.Tensor):\n logit_seg, _ = preds\n _, mask, mask_cls, _, img_path, _ = target\n\n assert logit_seg.shape == mask.shape\n\n pred_seg = torch.sigmoid(logit_seg).detach().cpu().numpy()\n gt_seg = mask.detach().cpu().numpy()\n gt_cls = mask_cls.detach().cpu().numpy()[:, 0].tolist()\n\n pred_seg = pred_seg.astype(\"float32\")\n for idx, file_path in enumerate(img_path):\n pred = cv2.resize(pred_seg[idx][0], (self.img_size, self.img_size))\n pred = np.expand_dims(pred, 0)\n gt = cv2.resize(\n gt_seg[idx][0],\n (self.img_size, self.img_size),\n interpolation=cv2.INTER_NEAREST,\n )\n gt = np.expand_dims(gt, 0)\n\n gt_c = gt_cls[idx]\n is_p = int(gt_c == 1.0)\n is_n = 1 - is_p\n\n gt_nums_, pred_nums_, tp_nums_, fp_nums_ = evaluation(\n pred, gt, iou_th=self.iou_thr, prob_ths=[self.prob_thr]\n )\n\n # csv = file_path.split(\"/\")[5]\n csv = file_path.split(\"png_1024/\")[1].split(\"/\")[0]\n if not hasattr(self, f\"{csv}_gt\"):\n self.csv_files += [csv]\n self.add_state(f\"{csv}_gt\", default=Tensor(0), dist_reduce_fx=\"sum\")\n self.add_state(f\"{csv}_pred\", default=Tensor(0), dist_reduce_fx=\"sum\")\n self.add_state(f\"{csv}_tp\", default=Tensor(0), dist_reduce_fx=\"sum\")\n self.add_state(f\"{csv}_fp\", default=Tensor(0), dist_reduce_fx=\"sum\")\n self.add_state(f\"{csv}_pos\", default=Tensor(0), dist_reduce_fx=\"sum\")\n self.add_state(\n f\"{csv}_neg\", default=torch.tensor(0), dist_reduce_fx=\"sum\"\n )\n\n # TODO: Need to be change if num_class > 1\n # FIXME: 몬 생긴 포맷..\n setattr(self, f\"{csv}_gt\", getattr(self, f\"{csv}_gt\") + gt_nums_[0])\n setattr(\n self, f\"{csv}_pred\", getattr(self, f\"{csv}_pred\") + pred_nums_[0, 0]\n )\n setattr(self, f\"{csv}_tp\", getattr(self, f\"{csv}_tp\") + tp_nums_[0, 0])\n setattr(self, f\"{csv}_fp\", getattr(self, f\"{csv}_fp\") + fp_nums_[0, 0])\n setattr(self, f\"{csv}_pos\", getattr(self, f\"{csv}_pos\") + is_p)\n setattr(self, f\"{csv}_neg\", getattr(self, f\"{csv}_neg\") + is_n)\n\n def update_each(self, preds: torch.Tensor, target: torch.Tensor):\n self.update(preds, target)\n\n def compute(self):\n gt = 0\n tp = 0\n fp = 0\n pos = 0\n neg = 0\n for csv in self.csv_files:\n gt += getattr(self, f\"{csv}_gt\").item()\n tp += getattr(self, f\"{csv}_tp\").item()\n fp += getattr(self, f\"{csv}_fp\").item()\n pos += getattr(self, f\"{csv}_pos\").item()\n neg += getattr(self, f\"{csv}_neg\").item()\n\n pre = tp / (tp + fp * (pos / (neg + 1e-5)) + 1e-5)\n rec = tp / (gt + 1e-5)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-5)\n myf1 = (pre + rec) / 2.0\n\n lesion_metric_dict = {\n \"pre\": pre,\n \"rec\": rec,\n \"f1\": f1,\n \"myf1\": myf1,\n }\n\n # FIXME: DDP Error: https://github.com/PyTorchLightning/pytorch-lightning/discussions/2529\n # Tensors must be CUDA and dense\n # if self.use_ddp:\n # 
lesion_metric_dict = torch.FloatTensor([myf1], device=self.device)\n\n return lesion_metric_dict\n\n def compute_each(self):\n metric_dict_each_csv = {}\n for csv in self.csv_files:\n gt = getattr(self, f\"{csv}_gt\").item()\n tp = getattr(self, f\"{csv}_tp\").item()\n fp = getattr(self, f\"{csv}_fp\").item()\n pos = getattr(self, f\"{csv}_pos\").item()\n neg = getattr(self, f\"{csv}_neg\").item()\n\n pre = tp / (tp + fp * (pos / (neg + 1e-5)) + 1e-5)\n rec = tp / (gt + 1e-5)\n f1 = 2 * (pre * rec) / (pre + rec + 1e-5)\n fppi = fp / (pos + neg + 1e-5)\n # myf1 = (pre + rec) / 2.0\n\n lesion_metric_dict = {\n \"gt\": gt,\n \"pos\": pos,\n \"neg\": neg,\n \"pre\": pre,\n \"rec\": rec,\n \"f1\": f1,\n \"fppi\": fppi\n # \"myf1\": myf1,\n }\n\n metric_dict_each_csv[csv] = lesion_metric_dict\n\n return metric_dict_each_csv\n\n\n# Helper functions\ndef calc_iou(bbox_a, bbox_b):\n \"\"\"\n :param a: bbox list [min_y, min_x, max_y, max_x]\n :param b: bbox list [min_y, min_x, max_y, max_x]\n :return:\n \"\"\"\n size_a = (bbox_a[2] - bbox_a[0]) * (bbox_a[3] - bbox_a[1])\n size_b = (bbox_b[2] - bbox_b[0]) * (bbox_b[3] - bbox_b[1])\n\n min_ab_y = max(bbox_a[0], bbox_b[0])\n min_ab_x = max(bbox_a[1], bbox_b[1])\n max_ab_y = min(bbox_a[2], bbox_b[2])\n max_ab_x = min(bbox_a[3], bbox_b[3])\n\n inter_ab = max(0, max_ab_y - min_ab_y) * max(0, max_ab_x - min_ab_x)\n\n return inter_ab / (size_a + size_b - inter_ab)\n\n\ndef evaluation(pred, gt, iou_th=0.15, prob_ths=[0.5]):\n \"\"\"\n :param pred: Prediction Seg Map, shape = (1, num_classes, height, width)\n :param gt: Ground-truth Seg Map, shape = (1, num_classes, height, width)\n :param iou_th: Threshold for prediction and gt matching\n :return:\n gt_nums: Ground-truth region numbers\n pred_nums: Prediction region numbers\n tp_nums: True Positive region numbers\n fp_nums: False Positive region numbers\n # 필수 가정: batch_size=1 (regionprops 함수가 2차원 행렬에만 적용 가능함)\n # Region을 고려에서 제외하는 경우(2048x2048 이미지 기반, pixel spacing=0.2mm)\n # i) Region bbox 크기 < 400 pixels\n # ii) (현재 사용x) Region bbox 장축<4mm(20pixels), 단축<2mm(10 pixels)\n # issue: # 3. 영상사이즈는 디텍터 크기에 따라 달라질 수 있습니다. 
완벽히 하기 위해선 pixel spacing 정보를 받아야 합니다.\n # # 따라서 영상 크기에 대해 기준이 변경되는 것은 현단계에서는 적용할 필요가 없어 보입니다.\n \"\"\"\n\n if len(pred.shape) > 3:\n pred = pred[0]\n gt = gt[0]\n\n num_classes = pred.shape[0]\n image_size = gt.shape[2]\n\n gt_regions = [\n skimage.measure.regionprops(skimage.measure.label(gt[c, :, :]))\n for c in range(num_classes)\n ]\n for c in range(num_classes):\n gt_regions[c] = [\n r for r in gt_regions[c] if r.area > (20 * (image_size / 2048)) ** 2\n ]\n\n pred_regions = [\n [\n skimage.measure.regionprops(skimage.measure.label(pred[c, :, :] > th))\n for c in range(num_classes)\n ]\n for th in prob_ths\n ] # shape - len(prob_th), num_classes\n\n # 초기화\n gt_nums = np.array([len(gt_regions[c]) for c in range(num_classes)])\n pred_nums = np.array(\n [\n [len(pred_regions[thi][c]) for c in range(num_classes)]\n for thi in range(len(prob_ths))\n ]\n )\n tp_nums = np.zeros((len(prob_ths), num_classes))\n fp_nums = pred_nums.copy() # .copy() 없으면 포인터가 같아짐\n\n # Gt-Pred Bbox Iou Matrix\n for c in range(num_classes):\n for thi in range(len(prob_ths)):\n if (gt_nums[c] == 0) or (pred_nums[thi][c] == 0): # np array 이상함;\n continue\n\n iou_matrix = np.zeros((gt_nums[c], pred_nums[thi][c]))\n for gi, gr in enumerate(gt_regions[c]):\n for pi, pr in enumerate(pred_regions[thi][c]):\n iou_matrix[gi, pi] = calc_iou(gr.bbox, pr.bbox)\n\n tp_nums[thi][c] = np.sum(np.any((iou_matrix >= iou_th), axis=1))\n fp_nums[thi][c] -= np.sum(np.any((iou_matrix > iou_th), axis=0))\n\n return gt_nums, pred_nums, tp_nums, fp_nums",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
import sys
import os
import csv
import urllib2, socket, time
import gzip, StringIO
import re, random, types
from bs4 import BeautifulSoup
from datetime import datetime
import json
from HTMLParser import HTMLParser
class MLStripper(HTMLParser):
def __init__(self):
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def extractData(url,title):
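    # download the page, strip <script> tags, find the element containing the title,
    # then walk up its parents until at least 1000 characters of text are collected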
data=""
req=urllib2.Request(url)
response=urllib2.urlopen(req)
html_data=response.read()
soup=BeautifulSoup(html_data)
[s.extract() for s in soup('script')]
    d = re.compile(r'.*%s.*' % re.escape(title))  # escape the title so regex metacharacters in it do not break the pattern
last_elem=0
for elem in soup(text=d):
last_elem=elem
if last_elem!=0:
p1=last_elem.parent
try1=1
while len(data)<1000:
parent=p1.parent
p1=parent
data=""
for each_child in parent.findChildren():
data+=each_child.get_text().strip().replace('\n','')
print try1
try1+=1
else:
data=""
for each_child in soup.body.findChildren():
data+=each_child.get_text().strip().replace('\n','')
return data
def readData(input_file):
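    # parse the JSON export and print the extracted article text for each record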
data=json.loads(input_file.read())
for each_r in data:
if each_r['ID']>=1:
s = MLStripper()
s.feed(each_r['title'])
title =s.get_data()
val=len(title)/2
val=val/2
print title[:-val]
article_data=extractData(each_r['url'],title)
print 'url',each_r['url']
print article_data
print '##############################################'
raw_input()
if __name__=="__main__":
    if len(sys.argv) >= 2:
input_file=open(sys.argv[1],"r")
readData(input_file)
else:
print "Usage: python extractnew.py <data_file_location>"
|
normal
|
{
"blob_id": "2d444c00e4dbdcb143d19752cd1a751169de73d3",
"index": 5746,
"step-1": "import sys\nimport os\nimport csv\nimport urllib2, socket, time\nimport gzip, StringIO\nimport re, random, types\nfrom bs4 import BeautifulSoup\nfrom datetime import datetime\nimport json\nfrom HTMLParser import HTMLParser\n\nclass MLStripper(HTMLParser):\n def __init__(self):\n self.reset()\n self.fed = []\n def handle_data(self, d):\n self.fed.append(d)\n def get_data(self):\n return ''.join(self.fed)\n\ndef extractData(url,title):\n data=\"\"\n req=urllib2.Request(url)\n response=urllib2.urlopen(req)\n html_data=response.read() \n soup=BeautifulSoup(html_data)\n [s.extract() for s in soup('script')]\n d=re.compile(r'.*%s.*' % title)\n last_elem=0\n for elem in soup(text=d):\n last_elem=elem\n if last_elem!=0: \n p1=last_elem.parent \n try1=1 \n while len(data)<1000: \n parent=p1.parent\n p1=parent\n data=\"\" \n for each_child in parent.findChildren():\n data+=each_child.get_text().strip().replace('\\n','') \n print try1\n try1+=1 \n else:\n data=\"\" \n for each_child in soup.body.findChildren():\n data+=each_child.get_text().strip().replace('\\n','') \n return data\n\n\ndef readData(input_file):\n data=json.loads(input_file.read())\n for each_r in data:\n if each_r['ID']>=1:\n s = MLStripper()\n s.feed(each_r['title'])\n title =s.get_data() \n val=len(title)/2\n val=val/2\n print title[:-val]\n article_data=extractData(each_r['url'],title)\n print 'url',each_r['url'] \n print article_data\n print '##############################################'\n raw_input() \nif __name__==\"__main__\":\n if sys.argv>=2:\n input_file=open(sys.argv[1],\"r\")\n readData(input_file)\n else:\n print \"Usage: python extractnew.py <data_file_location>\" \n \n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding:utf-8 -*-
from common import *
import itertools
def iteration_spider():
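    # request sequentially numbered pages, stopping after max_errors consecutive failed downloads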
max_errors = 5
num_errors = 0
for page in itertools.count(1):
url = 'http://example.webscraping.com/view/-{}'.format(page)
html = download(url)
if html is None:
num_errors += 1
if num_errors == max_errors:
break
else:
num_errors = 0
if __name__ == '__main__':
iteration_spider()
|
normal
|
{
"blob_id": "0eaba8f570772de864f52168a597b47a4150d015",
"index": 5924,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef iteration_spider():\n max_errors = 5\n num_errors = 0\n for page in itertools.count(1):\n url = 'http://example.webscraping.com/view/-{}'.format(page)\n html = download(url)\n if html is None:\n num_errors += 1\n if num_errors == max_errors:\n break\n else:\n num_errors = 0\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef iteration_spider():\n max_errors = 5\n num_errors = 0\n for page in itertools.count(1):\n url = 'http://example.webscraping.com/view/-{}'.format(page)\n html = download(url)\n if html is None:\n num_errors += 1\n if num_errors == max_errors:\n break\n else:\n num_errors = 0\n\n\nif __name__ == '__main__':\n iteration_spider()\n",
"step-4": "from common import *\nimport itertools\n\n\ndef iteration_spider():\n max_errors = 5\n num_errors = 0\n for page in itertools.count(1):\n url = 'http://example.webscraping.com/view/-{}'.format(page)\n html = download(url)\n if html is None:\n num_errors += 1\n if num_errors == max_errors:\n break\n else:\n num_errors = 0\n\n\nif __name__ == '__main__':\n iteration_spider()\n",
"step-5": "# -*- coding:utf-8 -*-\n\nfrom common import *\nimport itertools\n\ndef iteration_spider():\n\tmax_errors = 5\n\tnum_errors = 0\n\tfor page in itertools.count(1):\n\t\turl = 'http://example.webscraping.com/view/-{}'.format(page)\n\t\thtml = download(url)\n\t\tif html is None:\n\t\t\tnum_errors += 1\n\t\t\tif num_errors == max_errors:\n\t\t\t\tbreak\n\t\telse:\n\t\t\tnum_errors = 0\n\t\t\t\n\nif __name__ == '__main__':\n\titeration_spider()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import random
readpath = './DBLP/'
writepath = './DBLP/'
dataname = 'dblp.txt'
labelname = 'node2label.txt'
testsetname = writepath + 'dblp_testset.txt'
def run(save_rate):
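    # sort the edges by timestamp, keep the earliest save_rate fraction (plus ties) for training,
    # renumber the surviving node ids, then write the remaining edges as the test set and filter the labels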
rdataname = readpath + dataname
rlabelname = readpath + labelname
wdataname = writepath + dataname
wlabelname = writepath + labelname
ordata = []
all_user = set()
all_time = set()
rename = dict()
newdatasize = 0
with open(rdataname, 'r') as r:
for line in r:
x = line.strip('\n').split()
x[2] = float(x[2])
ordata.append(x)
ordata = sorted(ordata, key = lambda x:x[2])
datasize = len(ordata)
savesize = int(datasize * save_rate)
print("原始数据中共有 %d 条\n预计保留 %d 条" % (datasize, savesize))
while(savesize != datasize and ordata[savesize-1][2] == ordata[savesize][2]):
savesize = savesize + 1
print("实际保留 %d 条" % savesize)
print("实际切割比例" + str(savesize/datasize))
for i in range(savesize):
x = ordata[i]
a = str(x[0])
b = str(x[1])
all_user.update({a,b})
#print(len(all_user))
all_time.add(x[2])
print("实际保留数据中,用户数量 %d 个,不同时间节点 %d 个" %(len(all_user), len(all_time)))
newdatasize = savesize
list_all_user = list(all_user)
list_all_user = [int(i) for i in list_all_user]
list_all_user.sort()
step = 0
for i in list_all_user:
rename[i] = step
#print(i, rename[i])
step = step + 1
flag = os.path.exists(writepath)
if not flag:
os.makedirs(writepath)
with open(wdataname, 'w') as w:
for i in range(newdatasize):
x = ordata[i]
a = str(rename[int(x[0])])
b = str(rename[int(x[1])])
w.write(a + ' ' + b + ' ' + str(x[2])+'\n')
with open(testsetname, 'w') as w:
index = 0
for i in range(newdatasize,datasize):
x = ordata[i]
if(int(x[0]) not in rename or int(x[1]) not in rename):
continue
a = str(rename[int(x[0])])
b = str(rename[int(x[1])])
w.write(a + ' ' + b + ' ' + str(x[2])+'\n')
index = index+1
print('预计测试集剩余数量 %d'%(datasize-newdatasize+1))
print('测试集剩余数量 %d'%(index))
temp = 0
with open(rlabelname, 'r') as r:
with open(wlabelname, 'w') as w:
for line in r:
x = line.strip('\n').split()
if(x[0] in all_user):
temp = temp + 1
a = str(rename[int(x[0])])
w.write(a + ' ' + x[1] + '\n')
print("标签集数量 " + str(temp)+ " 个")
if __name__ == '__main__':
run(0.7)
|
normal
|
{
"blob_id": "4bd6a7c7fc6a788b2cb010f6513872bd3e0d396c",
"index": 5011,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef run(save_rate):\n rdataname = readpath + dataname\n rlabelname = readpath + labelname\n wdataname = writepath + dataname\n wlabelname = writepath + labelname\n ordata = []\n all_user = set()\n all_time = set()\n rename = dict()\n newdatasize = 0\n with open(rdataname, 'r') as r:\n for line in r:\n x = line.strip('\\n').split()\n x[2] = float(x[2])\n ordata.append(x)\n ordata = sorted(ordata, key=lambda x: x[2])\n datasize = len(ordata)\n savesize = int(datasize * save_rate)\n print('原始数据中共有 %d 条\\n预计保留 %d 条' % (datasize, savesize))\n while savesize != datasize and ordata[savesize - 1][2] == ordata[\n savesize][2]:\n savesize = savesize + 1\n print('实际保留 %d 条' % savesize)\n print('实际切割比例' + str(savesize / datasize))\n for i in range(savesize):\n x = ordata[i]\n a = str(x[0])\n b = str(x[1])\n all_user.update({a, b})\n all_time.add(x[2])\n print('实际保留数据中,用户数量 %d 个,不同时间节点 %d 个' % (len(all_user), len(all_time)))\n newdatasize = savesize\n list_all_user = list(all_user)\n list_all_user = [int(i) for i in list_all_user]\n list_all_user.sort()\n step = 0\n for i in list_all_user:\n rename[i] = step\n step = step + 1\n flag = os.path.exists(writepath)\n if not flag:\n os.makedirs(writepath)\n with open(wdataname, 'w') as w:\n for i in range(newdatasize):\n x = ordata[i]\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n with open(testsetname, 'w') as w:\n index = 0\n for i in range(newdatasize, datasize):\n x = ordata[i]\n if int(x[0]) not in rename or int(x[1]) not in rename:\n continue\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n index = index + 1\n print('预计测试集剩余数量 %d' % (datasize - newdatasize + 1))\n print('测试集剩余数量 %d' % index)\n temp = 0\n with open(rlabelname, 'r') as r:\n with open(wlabelname, 'w') as w:\n for line in r:\n x = line.strip('\\n').split()\n if x[0] in all_user:\n temp = temp + 1\n a = str(rename[int(x[0])])\n w.write(a + ' ' + x[1] + '\\n')\n print('标签集数量 ' + str(temp) + ' 个')\n\n\nif __name__ == '__main__':\n run(0.7)\n",
"step-3": "<mask token>\nreadpath = './DBLP/'\nwritepath = './DBLP/'\ndataname = 'dblp.txt'\nlabelname = 'node2label.txt'\ntestsetname = writepath + 'dblp_testset.txt'\n\n\ndef run(save_rate):\n rdataname = readpath + dataname\n rlabelname = readpath + labelname\n wdataname = writepath + dataname\n wlabelname = writepath + labelname\n ordata = []\n all_user = set()\n all_time = set()\n rename = dict()\n newdatasize = 0\n with open(rdataname, 'r') as r:\n for line in r:\n x = line.strip('\\n').split()\n x[2] = float(x[2])\n ordata.append(x)\n ordata = sorted(ordata, key=lambda x: x[2])\n datasize = len(ordata)\n savesize = int(datasize * save_rate)\n print('原始数据中共有 %d 条\\n预计保留 %d 条' % (datasize, savesize))\n while savesize != datasize and ordata[savesize - 1][2] == ordata[\n savesize][2]:\n savesize = savesize + 1\n print('实际保留 %d 条' % savesize)\n print('实际切割比例' + str(savesize / datasize))\n for i in range(savesize):\n x = ordata[i]\n a = str(x[0])\n b = str(x[1])\n all_user.update({a, b})\n all_time.add(x[2])\n print('实际保留数据中,用户数量 %d 个,不同时间节点 %d 个' % (len(all_user), len(all_time)))\n newdatasize = savesize\n list_all_user = list(all_user)\n list_all_user = [int(i) for i in list_all_user]\n list_all_user.sort()\n step = 0\n for i in list_all_user:\n rename[i] = step\n step = step + 1\n flag = os.path.exists(writepath)\n if not flag:\n os.makedirs(writepath)\n with open(wdataname, 'w') as w:\n for i in range(newdatasize):\n x = ordata[i]\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n with open(testsetname, 'w') as w:\n index = 0\n for i in range(newdatasize, datasize):\n x = ordata[i]\n if int(x[0]) not in rename or int(x[1]) not in rename:\n continue\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n index = index + 1\n print('预计测试集剩余数量 %d' % (datasize - newdatasize + 1))\n print('测试集剩余数量 %d' % index)\n temp = 0\n with open(rlabelname, 'r') as r:\n with open(wlabelname, 'w') as w:\n for line in r:\n x = line.strip('\\n').split()\n if x[0] in all_user:\n temp = temp + 1\n a = str(rename[int(x[0])])\n w.write(a + ' ' + x[1] + '\\n')\n print('标签集数量 ' + str(temp) + ' 个')\n\n\nif __name__ == '__main__':\n run(0.7)\n",
"step-4": "import os\nimport random\nreadpath = './DBLP/'\nwritepath = './DBLP/'\ndataname = 'dblp.txt'\nlabelname = 'node2label.txt'\ntestsetname = writepath + 'dblp_testset.txt'\n\n\ndef run(save_rate):\n rdataname = readpath + dataname\n rlabelname = readpath + labelname\n wdataname = writepath + dataname\n wlabelname = writepath + labelname\n ordata = []\n all_user = set()\n all_time = set()\n rename = dict()\n newdatasize = 0\n with open(rdataname, 'r') as r:\n for line in r:\n x = line.strip('\\n').split()\n x[2] = float(x[2])\n ordata.append(x)\n ordata = sorted(ordata, key=lambda x: x[2])\n datasize = len(ordata)\n savesize = int(datasize * save_rate)\n print('原始数据中共有 %d 条\\n预计保留 %d 条' % (datasize, savesize))\n while savesize != datasize and ordata[savesize - 1][2] == ordata[\n savesize][2]:\n savesize = savesize + 1\n print('实际保留 %d 条' % savesize)\n print('实际切割比例' + str(savesize / datasize))\n for i in range(savesize):\n x = ordata[i]\n a = str(x[0])\n b = str(x[1])\n all_user.update({a, b})\n all_time.add(x[2])\n print('实际保留数据中,用户数量 %d 个,不同时间节点 %d 个' % (len(all_user), len(all_time)))\n newdatasize = savesize\n list_all_user = list(all_user)\n list_all_user = [int(i) for i in list_all_user]\n list_all_user.sort()\n step = 0\n for i in list_all_user:\n rename[i] = step\n step = step + 1\n flag = os.path.exists(writepath)\n if not flag:\n os.makedirs(writepath)\n with open(wdataname, 'w') as w:\n for i in range(newdatasize):\n x = ordata[i]\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n with open(testsetname, 'w') as w:\n index = 0\n for i in range(newdatasize, datasize):\n x = ordata[i]\n if int(x[0]) not in rename or int(x[1]) not in rename:\n continue\n a = str(rename[int(x[0])])\n b = str(rename[int(x[1])])\n w.write(a + ' ' + b + ' ' + str(x[2]) + '\\n')\n index = index + 1\n print('预计测试集剩余数量 %d' % (datasize - newdatasize + 1))\n print('测试集剩余数量 %d' % index)\n temp = 0\n with open(rlabelname, 'r') as r:\n with open(wlabelname, 'w') as w:\n for line in r:\n x = line.strip('\\n').split()\n if x[0] in all_user:\n temp = temp + 1\n a = str(rename[int(x[0])])\n w.write(a + ' ' + x[1] + '\\n')\n print('标签集数量 ' + str(temp) + ' 个')\n\n\nif __name__ == '__main__':\n run(0.7)\n",
"step-5": "import os\nimport random\n\nreadpath = './DBLP/'\nwritepath = './DBLP/'\ndataname = 'dblp.txt'\nlabelname = 'node2label.txt'\ntestsetname = writepath + 'dblp_testset.txt'\n\ndef run(save_rate):\n\trdataname = readpath + dataname\n\trlabelname = readpath + labelname\n\twdataname = writepath + dataname\n\twlabelname = writepath + labelname\n\t\n\tordata = []\n\tall_user = set()\n\tall_time = set()\n\trename = dict()\n\tnewdatasize = 0\n\n\twith open(rdataname, 'r') as r:\n\t\tfor line in r:\n\t\t\tx = line.strip('\\n').split()\n\t\t\tx[2] = float(x[2])\n\t\t\tordata.append(x)\n\t\tordata = sorted(ordata, key = lambda x:x[2])\n\t\t\n\t\tdatasize = len(ordata)\n\t\tsavesize = int(datasize * save_rate)\n\t\tprint(\"原始数据中共有 %d 条\\n预计保留 %d 条\" % (datasize, savesize))\n\n\t\twhile(savesize != datasize and ordata[savesize-1][2] == ordata[savesize][2]):\n\t\t\tsavesize = savesize + 1\n\t\tprint(\"实际保留 %d 条\" % savesize)\n\t\tprint(\"实际切割比例\" + str(savesize/datasize))\n\t\t\n\t\tfor i in range(savesize):\n\t\t\tx = ordata[i]\n\t\t\ta = str(x[0])\n\t\t\tb = str(x[1])\n\t\t\tall_user.update({a,b})\n\t\t\t#print(len(all_user))\n\t\t\tall_time.add(x[2])\n\t\tprint(\"实际保留数据中,用户数量 %d 个,不同时间节点 %d 个\" %(len(all_user), len(all_time)))\n\t\tnewdatasize = savesize\n\t\t\n\n\t\tlist_all_user = list(all_user)\n\t\tlist_all_user = [int(i) for i in list_all_user]\n\t\tlist_all_user.sort()\n\t\tstep = 0\n\t\tfor i in list_all_user:\n\t\t\trename[i] = step\n\t\t\t#print(i, rename[i])\n\t\t\tstep = step + 1\n\t\t\t\n\t\t\n\n\t\tflag = os.path.exists(writepath)\n\t\tif not flag:\n\t\t\tos.makedirs(writepath)\n\n\t\twith open(wdataname, 'w') as w:\n\t\t\tfor i in range(newdatasize):\n\t\t\t\tx = ordata[i]\n\t\t\t\ta = str(rename[int(x[0])])\n\t\t\t\tb = str(rename[int(x[1])])\n\t\t\t\tw.write(a + ' ' + b + ' ' + str(x[2])+'\\n')\n\n\n\t\twith open(testsetname, 'w') as w:\n\t\t\tindex = 0\n\t\t\tfor i in range(newdatasize,datasize):\n\t\t\t\tx = ordata[i]\n\n\t\t\t\tif(int(x[0]) not in rename or int(x[1]) not in rename):\n\t\t\t\t\tcontinue\n\t\t\t\ta = str(rename[int(x[0])])\n\t\t\t\tb = str(rename[int(x[1])])\n\t\t\t\tw.write(a + ' ' + b + ' ' + str(x[2])+'\\n')\n\t\t\t\tindex = index+1\n\t\t\tprint('预计测试集剩余数量 %d'%(datasize-newdatasize+1))\n\t\t\tprint('测试集剩余数量 %d'%(index))\n\n\t\ttemp = 0\n\t\twith open(rlabelname, 'r') as r:\n\t\t\twith open(wlabelname, 'w') as w:\n\t\t\t\tfor line in r:\n\t\t\t\t\tx = line.strip('\\n').split()\n\t\t\t\t\tif(x[0] in all_user):\n\t\t\t\t\t\ttemp = temp + 1\n\t\t\t\t\t\ta = str(rename[int(x[0])])\n\t\t\t\t\t\tw.write(a + ' ' + x[1] + '\\n')\n\t\tprint(\"标签集数量 \" + str(temp)+ \" 个\")\n\t\nif __name__ == '__main__':\n\trun(0.7)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
from .embedpeek import EmbedPeek
__red_end_user_data_statement__ = "This cog does not persistently store data or metadata about users."
def setup(bot):
bot.add_cog(EmbedPeek(bot))
|
normal
|
{
"blob_id": "b66142e0b674d3920b8e3ad74e0d0b753f0a78c3",
"index": 3471,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef setup(bot):\n bot.add_cog(EmbedPeek(bot))\n",
"step-3": "<mask token>\n__red_end_user_data_statement__ = (\n 'This cog does not persistently store data or metadata about users.')\n\n\ndef setup(bot):\n bot.add_cog(EmbedPeek(bot))\n",
"step-4": "from .embedpeek import EmbedPeek\n__red_end_user_data_statement__ = (\n 'This cog does not persistently store data or metadata about users.')\n\n\ndef setup(bot):\n bot.add_cog(EmbedPeek(bot))\n",
"step-5": "from .embedpeek import EmbedPeek\n\n__red_end_user_data_statement__ = \"This cog does not persistently store data or metadata about users.\"\n\n\ndef setup(bot):\n bot.add_cog(EmbedPeek(bot))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Question:
You are given a string s consisting only of digits 0-9, commas ,, and dots .
Your task is to complete the regex_pattern defined below, which will be used to
re.split() all of the , and . symbols in s.
It’s guaranteed that every comma and every dot in s is preceded and followed
by a digit.
Sample Input:
100,000,000.000
Sample Output:
100
000
000
000
"""
# Solution:
import re
regex_pattern = r"[,.]"
print("\n".join(re.split(regex_pattern, input())))
|
normal
|
{
"blob_id": "020691fe2c7e7092d45415b72ce1804618421a2a",
"index": 9519,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('\\n'.join(re.split(regex_pattern, input())))\n",
"step-3": "<mask token>\nregex_pattern = '[,.]'\nprint('\\n'.join(re.split(regex_pattern, input())))\n",
"step-4": "<mask token>\nimport re\nregex_pattern = '[,.]'\nprint('\\n'.join(re.split(regex_pattern, input())))\n",
"step-5": "\"\"\"\nQuestion:\n\nYou are given a string s consisting only of digits 0-9, commas ,, and dots .\n\nYour task is to complete the regex_pattern defined below, which will be used to\nre.split() all of the , and . symbols in s.\n\nIt’s guaranteed that every comma and every dot in s is preceded and followed\nby a digit.\n\nSample Input:\n 100,000,000.000\n\nSample Output:\n 100\n 000\n 000\n 000\n\"\"\"\n\n# Solution:\n\n\nimport re\n\nregex_pattern = r\"[,.]\"\n\nprint(\"\\n\".join(re.split(regex_pattern, input())))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
a=range(1,11) #store the numbers 1~10 in a
b=1
for i in a: #multiply b by each element of a and store the result in b
b*=i
print(b)
|
normal
|
{
"blob_id": "8cb7290792f9390dd350e0c79711e0dd72d6063b",
"index": 9508,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in a:\n b *= i\nprint(b)\n",
"step-3": "a = range(1, 11)\nb = 1\nfor i in a:\n b *= i\nprint(b)\n",
"step-4": "a=range(1,11) #1~10숫자를 에이에 저장\nb=1\nfor i in a: #a에있는 원소를 b에 곱하고 비에 저장\n b*=i\nprint(b)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render
# Create your views here.
from django.shortcuts import redirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import Http404, HttpResponseForbidden
from django.shortcuts import render
from django.urls import reverse
from django.views.generic.edit import FormMixin
from django.contrib.auth.decorators import login_required
from django.views.generic import DetailView, ListView
# from .forms import ComposeForm
# from .models import Thread, ChatMessage
from django.shortcuts import render
import os
import django
os.environ["DJANGO_SETTINGS_MODULE"] = 'arizona.settings'
django.setup()
def index(request):
return render(request, 'canyon/index.html')
def results(request):
return render(request, 'canyon/results.html')
|
normal
|
{
"blob_id": "c385fe2af9aebc9c4a42d4db5a341fcedeec3898",
"index": 3579,
"step-1": "<mask token>\n\n\ndef index(request):\n return render(request, 'canyon/index.html')\n\n\ndef results(request):\n return render(request, 'canyon/results.html')\n",
"step-2": "<mask token>\ndjango.setup()\n\n\ndef index(request):\n return render(request, 'canyon/index.html')\n\n\ndef results(request):\n return render(request, 'canyon/results.html')\n",
"step-3": "<mask token>\nos.environ['DJANGO_SETTINGS_MODULE'] = 'arizona.settings'\ndjango.setup()\n\n\ndef index(request):\n return render(request, 'canyon/index.html')\n\n\ndef results(request):\n return render(request, 'canyon/results.html')\n",
"step-4": "from django.shortcuts import render\nfrom django.shortcuts import redirect\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import Http404, HttpResponseForbidden\nfrom django.shortcuts import render\nfrom django.urls import reverse\nfrom django.views.generic.edit import FormMixin\nfrom django.contrib.auth.decorators import login_required\nfrom django.views.generic import DetailView, ListView\nfrom django.shortcuts import render\nimport os\nimport django\nos.environ['DJANGO_SETTINGS_MODULE'] = 'arizona.settings'\ndjango.setup()\n\n\ndef index(request):\n return render(request, 'canyon/index.html')\n\n\ndef results(request):\n return render(request, 'canyon/results.html')\n",
"step-5": "from django.shortcuts import render\n\n# Create your views here.\nfrom django.shortcuts import redirect\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.http import Http404, HttpResponseForbidden\nfrom django.shortcuts import render\nfrom django.urls import reverse\nfrom django.views.generic.edit import FormMixin\nfrom django.contrib.auth.decorators import login_required\nfrom django.views.generic import DetailView, ListView\n\n# from .forms import ComposeForm\n# from .models import Thread, ChatMessage\n\nfrom django.shortcuts import render\nimport os\nimport django\nos.environ[\"DJANGO_SETTINGS_MODULE\"] = 'arizona.settings'\ndjango.setup()\n\n\ndef index(request):\n return render(request, 'canyon/index.html')\n\n\ndef results(request):\n return render(request, 'canyon/results.html')\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import matplotlib.pyplot as plotOp
import numpy as np
from random import randint
import re as regexOp
|
normal
|
{
"blob_id": "6c0a1d4ffd64e0566be53937d9b48975f2530852",
"index": 7767,
"step-1": "<mask token>\n",
"step-2": "import matplotlib.pyplot as plotOp\nimport numpy as np\nfrom random import randint\nimport re as regexOp\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
# prevent numpy exponential
# notation on print, default False
np.set_printoptions(suppress=True)
y_cord_df = pd.DataFrame(data=None, columns=['Time', 'Orien'])
list_no = np.arange(0.0, 108000.0, 1.0)
y_cord_df['Time'] = (list_no*(1/60))/60
rolling_avg_duration= 10 #in seconds
def vel_det(file, legend_label, line_color):
fps=60
data_df = pd.read_hdf(path_or_buf=file)
bodyparts = data_df.columns.get_level_values(1)
coords = data_df.columns.get_level_values(2)
bodyparts2plot = bodyparts
scorer = data_df.columns.get_level_values(0)[0]
Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))
column_title = bodyparts + "_" + coords
data_df.columns = column_title
# calculate the time elapsed per frame and append column
data_df['Time Elapsed'] = Time / fps
# print(data_df)
# what's being plotted
# plt.plot(data_df['Time Elapsed'], data_df['velocity_roll'], color=line_color, marker='o', markersize=0.4, linewidth=0.3, label=legend_label) # scatter plot with faint lines
# plt.plot(data_df['Time Elapsed']/60, data_df['velocity_roll'], color=line_color, linewidth=1, label=legend_label)
# plot formatting
# plt.xlabel('time (seconds)')
# plt.ylabel('velocity (pixels/second)')
# plt.legend(loc=2)
# plt.title('total distance traveled vs. time: ' + path)
animal = []
animal[:] = ' '.join(file.split()[2:5])
# plt.title('Total Distance vs. Time for: ' + ' '.join(file.split()[:2]) + " "+ ''.join(animal[:2]))
# plt.title(str(rolling_avg_duration)+' second Rolling Velocity Pretreat 3mkgNaltrexone+5mgkg U50')
data_df['Time Elapsed'] = Time / fps
y_cord_df[file] = data_df['head_y']
y_cord_df[file+'_orient'] = np.NaN
i = 0
# rear_values = data_df['head_y'].values<=300
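    # a head y-coordinate of at most 300 px marks the frame as 'rear'; every other frame keeps the default 'groom' label assigned below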
rear_values = data_df['head_y'].values <= 300
print(rear_values)
data_df['Orientation']=rear_values
data_df['GR'] = 'groom'
data_df.loc[rear_values == True, 'GR'] = 'rear'
# for time in Time:
# if data_df['head_y'].iloc[time] >= 234:
# data_df[file + '_orient'] = 'rear'
# i=1+i
# # using 1 for rear
# else:
# # 0 for groom/walk
# data_df[file + '_orient'] = 'groom'
# i=1+i
# print(data_df)
# for values in data_df['head_y']:
# if values >= 234:
# y_cord_df.insert(loc=data_df.loc[], column=file + '_orient', value=1, allow_duplicates=True)
# else:
# # 0 for groom/walk
# y_cord_df.insert(loc=i, column=file+'_orient', value=0, allow_duplicates=True)
# i = i+1
# print('iter'+str(i))
# print(data_df['Orientation'])
filt_df = data_df['head_y'] > 400
print(data_df[filt_df])
plt.figure(figsize=(6, 9.5))
# plt.plot(data_df['Time Elapsed']/60, data_df["GR"], color=line_color, linewidth=1, label=legend_label)
# plt.plot(data_df['Time Elapsed']/60, data_df['head_y']*-1, color=line_color, linewidth=1, label=legend_label)
plt.plot(data_df[filt_df].head_y,data_df[filt_df].index/3600, color=line_color, linewidth=1, label=legend_label)
# plt.axhline(y=-300)
leg = plt.legend()
font = {'family': 'Arial',
'size': 12}
plt.rc('font', **font)
plt.rc('lines', linewidth = 1)
for i in leg.legendHandles:
i.set_linewidth(3)
plt.xlabel('y coordinate(pixels)', fontsize=12)
plt.ylabel('time(minutes)', fontsize=12)
plt.title(legend_label)
plt.savefig(legend_label+'.jpg', format='jpg')
plt.show()
if __name__ == '__main__':
"""Saline Data"""
# vel_det(file='Saline_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline F0', line_color='yellowgreen')
# vel_det(file='Saline_Ai14_OPRK1_C2_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline F1', line_color='lightgreen')
# vel_det(file='Saline_Ai14_OPRK1_C1_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline F2', line_color='lightgreen')
#
# vel_det(file='Saline_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline M1', line_color='green')
# vel_det(file='Saline_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline M2', line_color='lightgreen')
# vel_det(file='Saline_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline M3', line_color='lightgreen')
# vel_det(file='Saline_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='Saline M4', line_color='lime')
# only_saline = y_cord_df.loc[:, ['Saline_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C2_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C1_M2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C1_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Saline_Ai14_OPRK1_C1_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]
# y_cord_df['Avg Vel Saline'] = only_saline.mean(axis=1)
# avg_df['Avg Vel Saline SEM'] = stats.sem(only_saline, axis=1)
# plt.plot(avg_df['Time'], avg_df['Avg Vel Saline'], color='black', linewidth=1, label='Average Velocity Saline+Saline')
#
"""Naltrexone Data"""
# vel_det(file='Naltr_U50_Ai14_OPRK1_C2_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F0 Pretreat 3mkg Naltrexone+5mgkg U50', line_color='#ee4466')
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000filtered.h5',
# legend_label='F1 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='orangered')
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F2 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='darkred')
#
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M1 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='red')
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M2 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='red')
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M3 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='firebrick')
# vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M4 Pretreat 3mgkg Naltrexone+5mkg U50', line_color='darksalmon')
# only_naltr = avg_df.loc[:,
# ['Nalt_U50_Ai14_OPRK1_C1_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Nalt_U50_Ai14_OPRK1_C1_M2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Nalt_U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Nalt_U50_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Nalt_U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Naltr_U50_Ai14_OPRK1_C2_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'Nalt_U50_Ai14_OPRK1_C1_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]
# avg_df['Avg Vel Naltr'] = only_naltr.mean(axis=1)
# avg_df['Avg Vel Naltr SEM'] = stats.sem(only_naltr, axis=1)
# plt.plot(avg_df['Time'], avg_df['Avg Vel Naltr'], color='red', linewidth=1, label='Average Velocity 3mgkg Naltr+5mgkg U50')
#
#
"""U50 Data"""
vel_det(file='U50_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='F0 5mgkg U50', line_color='steelblue')
vel_det(file='U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='F1 5mgkg U50', line_color='deepskyblue')
vel_det(file='U50_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='F2 5mgkg U50', line_color='powderblue')
vel_det(file='U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='M1 5mgkg U50', line_color='blue')
vel_det(file='U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='M2 5mgkg U50', line_color='blue')
vel_det(file='U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='M3 5mgkg U50', line_color='lightblue')
vel_det(file='U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
legend_label='M4 5mgkg U50', line_color='turquoise')
# only_U50 = avg_df.loc[:,
# ['U50_Ai14_OPRK1_C1_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C1_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C1_M2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C2_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',
# 'U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]
# avg_df['Avg Vel U50'] = only_U50.mean(axis=1)
# avg_df['Avg Vel U50 SEM'] = stats.sem(only_U50, axis=1)
# plt.plot(avg_df['Time'], avg_df['Avg Vel U50'], color='orange', linewidth=1, label='Average Velocity Saline+5mgkg U50')
#
#
"""NORBNI U50 Data"""
#
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F0_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F0 10mgkg NORBNI+5mgkg U50', line_color='orange')
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F1_sDLC_resnet50_SideViewNov1shuffle1_180000filtered.h5',
# legend_label='F1 10mgkg NORBNI+5mgkg U50', line_color='darkorange')
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F2_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F2 10mgkg NORBNI+5mgkg U50', line_color='coral')
#
#
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M1_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M1 10mgkg NORBNI+5mgkg U50', line_color='orange')
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M2_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M2 10mgkg NORBNI+5mgkg U50', line_color='orange')
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M3_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M3 10mgkg NORBNI+5mgkg U50', line_color='orange') #tiger color
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M4_SDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M4 10mgkg NORBNI+5mkg U50', line_color='#ed8203') #apricot color
# only_NORBNI = avg_df.loc[:,
# [
# 'NORBNI_U50_Ai14_OPRK1_C2_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',
# 'NORBNI_U50_Ai14_OPRK1_C2_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',
# 'NORBNI_U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',
# 'NORBNI_U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5'
# ]]
# avg_df['Avg Vel NORBNI'] = only_NORBNI.mean(axis=1)
# avg_df['Avg Vel NORBNI SEM'] = stats.sem(only_NORBNI, axis=1)
# plt.plot(avg_df['Time'], avg_df['Avg Vel NORBNI'], color='blue', linewidth=1,
# label='Average Velocity 10mgkg NORBNI +5mgkg U50')
#
"""NORBNI Saline"""
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C2_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F1 10mgkg NORBNI+Saline', line_color='purple')
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F2 10mgkg NORBNI+Saline', line_color='purple')
# vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F0_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='F0 10mgkg NORBNI+Saline', line_color='violet')
#
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M1 10mgkg NORBNI+Saline', line_color='blueviolet')
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M2 10mgkg NORBNI+Saline', line_color='blueviolet')
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M4 10mkg NORBNI+Saline', line_color='mediumorchid')
# vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',
# legend_label='M3 10mgkg NORBNI+Saline', line_color='purple')
#
# plt.fill_between(avg_df['Time'], avg_df["Avg Vel Saline"]-avg_df["Avg Vel Saline SEM"],
# avg_df["Avg Vel Saline"]+avg_df["Avg Vel Saline SEM"], alpha=0.25, facecolor='black', edgecolor='black')
# plt.fill_between(avg_df['Time'], avg_df["Avg Vel Naltr"]-avg_df["Avg Vel Naltr SEM"],
# avg_df["Avg Vel Naltr"]+avg_df["Avg Vel Naltr SEM"], alpha=0.25, facecolor='red', edgecolor='red')
# plt.fill_between(avg_df['Time'], avg_df["Avg Vel U50"]-avg_df["Avg Vel U50 SEM"],
# avg_df["Avg Vel U50"]+avg_df["Avg Vel U50 SEM"], alpha=0.25, facecolor='orange', edgecolor='orange')
# plt.fill_between(avg_df['Time'], avg_df["Avg Vel NORBNI"]-avg_df["Avg Vel NORBNI SEM"],
# avg_df["Avg Vel NORBNI"]+avg_df["Avg Vel NORBNI SEM"], alpha=0.25, facecolor='blue', edgecolor='blue')
# plt.plot()
# leg = plt.legend()
# font = {'family': 'Arial',
# 'size': 12}
# plt.rc('font', **font)
# plt.rc('lines', linewidth = 1)
# for i in leg.legendHandles:
# i.set_linewidth(3)
# plt.xlabel('time (minutes)', fontsize=12)
# plt.ylabel('pixel', fontsize=12)
# plt.title('F2 NORBNI, NORBNI+U50, Saline Head Inverted Y-coordinate')
# plt.show()
|
normal
|
{
"blob_id": "ba5171d3de87ec01770a7174d9783d5058b0fced",
"index": 9896,
"step-1": "<mask token>\n\n\ndef vel_det(file, legend_label, line_color):\n fps = 60\n data_df = pd.read_hdf(path_or_buf=file)\n bodyparts = data_df.columns.get_level_values(1)\n coords = data_df.columns.get_level_values(2)\n bodyparts2plot = bodyparts\n scorer = data_df.columns.get_level_values(0)[0]\n Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))\n column_title = bodyparts + '_' + coords\n data_df.columns = column_title\n data_df['Time Elapsed'] = Time / fps\n animal = []\n animal[:] = ' '.join(file.split()[2:5])\n data_df['Time Elapsed'] = Time / fps\n y_cord_df[file] = data_df['head_y']\n y_cord_df[file + '_orient'] = np.NaN\n i = 0\n rear_values = data_df['head_y'].values <= 300\n print(rear_values)\n data_df['Orientation'] = rear_values\n data_df['GR'] = 'groom'\n data_df.loc[rear_values == True, 'GR'] = 'rear'\n filt_df = data_df['head_y'] > 400\n print(data_df[filt_df])\n plt.figure(figsize=(6, 9.5))\n plt.plot(data_df[filt_df].head_y, data_df[filt_df].index / 3600, color=\n line_color, linewidth=1, label=legend_label)\n leg = plt.legend()\n font = {'family': 'Arial', 'size': 12}\n plt.rc('font', **font)\n plt.rc('lines', linewidth=1)\n for i in leg.legendHandles:\n i.set_linewidth(3)\n plt.xlabel('y coordinate(pixels)', fontsize=12)\n plt.ylabel('time(minutes)', fontsize=12)\n plt.title(legend_label)\n plt.savefig(legend_label + '.jpg', format='jpg')\n plt.show()\n\n\n<mask token>\n",
"step-2": "<mask token>\nnp.set_printoptions(suppress=True)\n<mask token>\n\n\ndef vel_det(file, legend_label, line_color):\n fps = 60\n data_df = pd.read_hdf(path_or_buf=file)\n bodyparts = data_df.columns.get_level_values(1)\n coords = data_df.columns.get_level_values(2)\n bodyparts2plot = bodyparts\n scorer = data_df.columns.get_level_values(0)[0]\n Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))\n column_title = bodyparts + '_' + coords\n data_df.columns = column_title\n data_df['Time Elapsed'] = Time / fps\n animal = []\n animal[:] = ' '.join(file.split()[2:5])\n data_df['Time Elapsed'] = Time / fps\n y_cord_df[file] = data_df['head_y']\n y_cord_df[file + '_orient'] = np.NaN\n i = 0\n rear_values = data_df['head_y'].values <= 300\n print(rear_values)\n data_df['Orientation'] = rear_values\n data_df['GR'] = 'groom'\n data_df.loc[rear_values == True, 'GR'] = 'rear'\n filt_df = data_df['head_y'] > 400\n print(data_df[filt_df])\n plt.figure(figsize=(6, 9.5))\n plt.plot(data_df[filt_df].head_y, data_df[filt_df].index / 3600, color=\n line_color, linewidth=1, label=legend_label)\n leg = plt.legend()\n font = {'family': 'Arial', 'size': 12}\n plt.rc('font', **font)\n plt.rc('lines', linewidth=1)\n for i in leg.legendHandles:\n i.set_linewidth(3)\n plt.xlabel('y coordinate(pixels)', fontsize=12)\n plt.ylabel('time(minutes)', fontsize=12)\n plt.title(legend_label)\n plt.savefig(legend_label + '.jpg', format='jpg')\n plt.show()\n\n\nif __name__ == '__main__':\n \"\"\"Saline Data\"\"\"\n \"\"\"Naltrexone Data\"\"\"\n \"\"\"U50 Data\"\"\"\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F0 5mgkg U50', line_color='steelblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F1 5mgkg U50', line_color='deepskyblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F2 5mgkg U50', line_color='powderblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M1 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M2 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M3 5mgkg U50', line_color='lightblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M4 5mgkg U50', line_color='turquoise')\n \"\"\"NORBNI U50 Data\"\"\"\n \"\"\"NORBNI Saline\"\"\"\n",
"step-3": "<mask token>\nnp.set_printoptions(suppress=True)\ny_cord_df = pd.DataFrame(data=None, columns=['Time', 'Orien'])\nlist_no = np.arange(0.0, 108000.0, 1.0)\ny_cord_df['Time'] = list_no * (1 / 60) / 60\nrolling_avg_duration = 10\n\n\ndef vel_det(file, legend_label, line_color):\n fps = 60\n data_df = pd.read_hdf(path_or_buf=file)\n bodyparts = data_df.columns.get_level_values(1)\n coords = data_df.columns.get_level_values(2)\n bodyparts2plot = bodyparts\n scorer = data_df.columns.get_level_values(0)[0]\n Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))\n column_title = bodyparts + '_' + coords\n data_df.columns = column_title\n data_df['Time Elapsed'] = Time / fps\n animal = []\n animal[:] = ' '.join(file.split()[2:5])\n data_df['Time Elapsed'] = Time / fps\n y_cord_df[file] = data_df['head_y']\n y_cord_df[file + '_orient'] = np.NaN\n i = 0\n rear_values = data_df['head_y'].values <= 300\n print(rear_values)\n data_df['Orientation'] = rear_values\n data_df['GR'] = 'groom'\n data_df.loc[rear_values == True, 'GR'] = 'rear'\n filt_df = data_df['head_y'] > 400\n print(data_df[filt_df])\n plt.figure(figsize=(6, 9.5))\n plt.plot(data_df[filt_df].head_y, data_df[filt_df].index / 3600, color=\n line_color, linewidth=1, label=legend_label)\n leg = plt.legend()\n font = {'family': 'Arial', 'size': 12}\n plt.rc('font', **font)\n plt.rc('lines', linewidth=1)\n for i in leg.legendHandles:\n i.set_linewidth(3)\n plt.xlabel('y coordinate(pixels)', fontsize=12)\n plt.ylabel('time(minutes)', fontsize=12)\n plt.title(legend_label)\n plt.savefig(legend_label + '.jpg', format='jpg')\n plt.show()\n\n\nif __name__ == '__main__':\n \"\"\"Saline Data\"\"\"\n \"\"\"Naltrexone Data\"\"\"\n \"\"\"U50 Data\"\"\"\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F0 5mgkg U50', line_color='steelblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F1 5mgkg U50', line_color='deepskyblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F2 5mgkg U50', line_color='powderblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M1 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M2 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M3 5mgkg U50', line_color='lightblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M4 5mgkg U50', line_color='turquoise')\n \"\"\"NORBNI U50 Data\"\"\"\n \"\"\"NORBNI Saline\"\"\"\n",
"step-4": "import os\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom scipy import stats\nnp.set_printoptions(suppress=True)\ny_cord_df = pd.DataFrame(data=None, columns=['Time', 'Orien'])\nlist_no = np.arange(0.0, 108000.0, 1.0)\ny_cord_df['Time'] = list_no * (1 / 60) / 60\nrolling_avg_duration = 10\n\n\ndef vel_det(file, legend_label, line_color):\n fps = 60\n data_df = pd.read_hdf(path_or_buf=file)\n bodyparts = data_df.columns.get_level_values(1)\n coords = data_df.columns.get_level_values(2)\n bodyparts2plot = bodyparts\n scorer = data_df.columns.get_level_values(0)[0]\n Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))\n column_title = bodyparts + '_' + coords\n data_df.columns = column_title\n data_df['Time Elapsed'] = Time / fps\n animal = []\n animal[:] = ' '.join(file.split()[2:5])\n data_df['Time Elapsed'] = Time / fps\n y_cord_df[file] = data_df['head_y']\n y_cord_df[file + '_orient'] = np.NaN\n i = 0\n rear_values = data_df['head_y'].values <= 300\n print(rear_values)\n data_df['Orientation'] = rear_values\n data_df['GR'] = 'groom'\n data_df.loc[rear_values == True, 'GR'] = 'rear'\n filt_df = data_df['head_y'] > 400\n print(data_df[filt_df])\n plt.figure(figsize=(6, 9.5))\n plt.plot(data_df[filt_df].head_y, data_df[filt_df].index / 3600, color=\n line_color, linewidth=1, label=legend_label)\n leg = plt.legend()\n font = {'family': 'Arial', 'size': 12}\n plt.rc('font', **font)\n plt.rc('lines', linewidth=1)\n for i in leg.legendHandles:\n i.set_linewidth(3)\n plt.xlabel('y coordinate(pixels)', fontsize=12)\n plt.ylabel('time(minutes)', fontsize=12)\n plt.title(legend_label)\n plt.savefig(legend_label + '.jpg', format='jpg')\n plt.show()\n\n\nif __name__ == '__main__':\n \"\"\"Saline Data\"\"\"\n \"\"\"Naltrexone Data\"\"\"\n \"\"\"U50 Data\"\"\"\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F0 5mgkg U50', line_color='steelblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F1 5mgkg U50', line_color='deepskyblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='F2 5mgkg U50', line_color='powderblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M1 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M2 5mgkg U50', line_color='blue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M3 5mgkg U50', line_color='lightblue')\n vel_det(file=\n 'U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5'\n , legend_label='M4 5mgkg U50', line_color='turquoise')\n \"\"\"NORBNI U50 Data\"\"\"\n \"\"\"NORBNI Saline\"\"\"\n",
"step-5": "import os\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom scipy import stats\n\n\n# prevent numpy exponential\n# notation on print, default False\nnp.set_printoptions(suppress=True)\n\ny_cord_df = pd.DataFrame(data=None, columns=['Time', 'Orien'])\nlist_no = np.arange(0.0, 108000.0, 1.0)\ny_cord_df['Time'] = (list_no*(1/60))/60\nrolling_avg_duration= 10 #in seconds\n\ndef vel_det(file, legend_label, line_color):\n fps=60\n\n data_df = pd.read_hdf(path_or_buf=file)\n bodyparts = data_df.columns.get_level_values(1)\n coords = data_df.columns.get_level_values(2)\n bodyparts2plot = bodyparts\n scorer = data_df.columns.get_level_values(0)[0]\n Time = np.arange(np.size(data_df[scorer][bodyparts2plot[0]]['x'].values))\n column_title = bodyparts + \"_\" + coords\n data_df.columns = column_title\n\n # calculate the time elapsed per frame and append column\n data_df['Time Elapsed'] = Time / fps\n\n # print(data_df)\n\n # what's being plotted\n # plt.plot(data_df['Time Elapsed'], data_df['velocity_roll'], color=line_color, marker='o', markersize=0.4, linewidth=0.3, label=legend_label) # scatter plot with faint lines\n # plt.plot(data_df['Time Elapsed']/60, data_df['velocity_roll'], color=line_color, linewidth=1, label=legend_label)\n # plot formatting\n # plt.xlabel('time (seconds)')\n # plt.ylabel('velocity (pixels/second)')\n # plt.legend(loc=2)\n # plt.title('total distance traveled vs. time: ' + path)\n animal = []\n animal[:] = ' '.join(file.split()[2:5])\n # plt.title('Total Distance vs. Time for: ' + ' '.join(file.split()[:2]) + \" \"+ ''.join(animal[:2]))\n # plt.title(str(rolling_avg_duration)+' second Rolling Velocity Pretreat 3mkgNaltrexone+5mgkg U50')\n\n data_df['Time Elapsed'] = Time / fps\n y_cord_df[file] = data_df['head_y']\n y_cord_df[file+'_orient'] = np.NaN\n\n i = 0\n\n # rear_values = data_df['head_y'].values<=300\n rear_values = data_df['head_y'].values <= 300\n print(rear_values)\n data_df['Orientation']=rear_values\n data_df['GR'] = 'groom'\n data_df.loc[rear_values == True, 'GR'] = 'rear'\n\n # for time in Time:\n # if data_df['head_y'].iloc[time] >= 234:\n # data_df[file + '_orient'] = 'rear'\n # i=1+i\n # # using 1 for rear\n # else:\n # # 0 for groom/walk\n # data_df[file + '_orient'] = 'groom'\n # i=1+i\n # print(data_df)\n # for values in data_df['head_y']:\n # if values >= 234:\n # y_cord_df.insert(loc=data_df.loc[], column=file + '_orient', value=1, allow_duplicates=True)\n # else:\n # # 0 for groom/walk\n # y_cord_df.insert(loc=i, column=file+'_orient', value=0, allow_duplicates=True)\n # i = i+1\n # print('iter'+str(i))\n # print(data_df['Orientation'])\n filt_df = data_df['head_y'] > 400\n print(data_df[filt_df])\n plt.figure(figsize=(6, 9.5))\n # plt.plot(data_df['Time Elapsed']/60, data_df[\"GR\"], color=line_color, linewidth=1, label=legend_label)\n # plt.plot(data_df['Time Elapsed']/60, data_df['head_y']*-1, color=line_color, linewidth=1, label=legend_label)\n plt.plot(data_df[filt_df].head_y,data_df[filt_df].index/3600, color=line_color, linewidth=1, label=legend_label)\n\n # plt.axhline(y=-300)\n\n\n leg = plt.legend()\n font = {'family': 'Arial',\n 'size': 12}\n plt.rc('font', **font)\n plt.rc('lines', linewidth = 1)\n for i in leg.legendHandles:\n i.set_linewidth(3)\n plt.xlabel('y coordinate(pixels)', fontsize=12)\n plt.ylabel('time(minutes)', fontsize=12)\n plt.title(legend_label)\n\n\n plt.savefig(legend_label+'.jpg', format='jpg')\n plt.show()\nif __name__ == '__main__':\n\n \"\"\"Saline Data\"\"\"\n # 
vel_det(file='Saline_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline F0', line_color='yellowgreen')\n # vel_det(file='Saline_Ai14_OPRK1_C2_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline F1', line_color='lightgreen')\n # vel_det(file='Saline_Ai14_OPRK1_C1_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline F2', line_color='lightgreen')\n #\n # vel_det(file='Saline_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline M1', line_color='green')\n # vel_det(file='Saline_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline M2', line_color='lightgreen')\n # vel_det(file='Saline_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline M3', line_color='lightgreen')\n # vel_det(file='Saline_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='Saline M4', line_color='lime')\n\n\n # only_saline = y_cord_df.loc[:, ['Saline_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C2_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C1_M2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C1_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Saline_Ai14_OPRK1_C1_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]\n # y_cord_df['Avg Vel Saline'] = only_saline.mean(axis=1)\n # avg_df['Avg Vel Saline SEM'] = stats.sem(only_saline, axis=1)\n # plt.plot(avg_df['Time'], avg_df['Avg Vel Saline'], color='black', linewidth=1, label='Average Velocity Saline+Saline')\n #\n \"\"\"Naltrexone Data\"\"\"\n # vel_det(file='Naltr_U50_Ai14_OPRK1_C2_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F0 Pretreat 3mkg Naltrexone+5mgkg U50', line_color='#ee4466')\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000filtered.h5',\n # legend_label='F1 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='orangered')\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F2 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='darkred')\n #\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M1 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='red')\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M2 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='red')\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M3 Pretreat 3mgkg Naltrexone+5mgkg U50', line_color='firebrick')\n # vel_det(file='Nalt_U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M4 Pretreat 3mgkg Naltrexone+5mkg U50', line_color='darksalmon')\n\n # only_naltr = avg_df.loc[:,\n # ['Nalt_U50_Ai14_OPRK1_C1_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Nalt_U50_Ai14_OPRK1_C1_M2_Top 
DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Nalt_U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Nalt_U50_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Nalt_U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Naltr_U50_Ai14_OPRK1_C2_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'Nalt_U50_Ai14_OPRK1_C1_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]\n # avg_df['Avg Vel Naltr'] = only_naltr.mean(axis=1)\n # avg_df['Avg Vel Naltr SEM'] = stats.sem(only_naltr, axis=1)\n # plt.plot(avg_df['Time'], avg_df['Avg Vel Naltr'], color='red', linewidth=1, label='Average Velocity 3mgkg Naltr+5mgkg U50')\n #\n #\n \"\"\"U50 Data\"\"\"\n\n vel_det(file='U50_Ai14_OPRK1_C1_F0_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='F0 5mgkg U50', line_color='steelblue')\n vel_det(file='U50_Ai14_OPRK1_C1_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='F1 5mgkg U50', line_color='deepskyblue')\n vel_det(file='U50_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='F2 5mgkg U50', line_color='powderblue')\n\n vel_det(file='U50_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='M1 5mgkg U50', line_color='blue')\n vel_det(file='U50_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='M2 5mgkg U50', line_color='blue')\n vel_det(file='U50_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='M3 5mgkg U50', line_color='lightblue')\n vel_det(file='U50_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n legend_label='M4 5mgkg U50', line_color='turquoise')\n\n # only_U50 = avg_df.loc[:,\n # ['U50_Ai14_OPRK1_C1_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C1_F0_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C1_M1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C1_M2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C2_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5',\n # 'U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered - Copy.h5']]\n # avg_df['Avg Vel U50'] = only_U50.mean(axis=1)\n # avg_df['Avg Vel U50 SEM'] = stats.sem(only_U50, axis=1)\n # plt.plot(avg_df['Time'], avg_df['Avg Vel U50'], color='orange', linewidth=1, label='Average Velocity Saline+5mgkg U50')\n #\n #\n \"\"\"NORBNI U50 Data\"\"\"\n #\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F0_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F0 10mgkg NORBNI+5mgkg U50', line_color='orange')\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F1_sDLC_resnet50_SideViewNov1shuffle1_180000filtered.h5',\n # legend_label='F1 10mgkg NORBNI+5mgkg U50', line_color='darkorange')\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F2_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F2 10mgkg NORBNI+5mgkg U50', line_color='coral')\n #\n #\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M1_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M1 10mgkg NORBNI+5mgkg U50', line_color='orange')\n # 
vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M2_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M2 10mgkg NORBNI+5mgkg U50', line_color='orange')\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M3_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M3 10mgkg NORBNI+5mgkg U50', line_color='orange') #tiger color\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C1_M4_SDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M4 10mgkg NORBNI+5mkg U50', line_color='#ed8203') #apricot color\n\n # only_NORBNI = avg_df.loc[:,\n # [\n # 'NORBNI_U50_Ai14_OPRK1_C2_F1_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',\n # 'NORBNI_U50_Ai14_OPRK1_C2_F2_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',\n # 'NORBNI_U50_Ai14_OPRK1_C1_M3_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5',\n # 'NORBNI_U50_Ai14_OPRK1_C1_M4_Top DownDLC_resnet50_BigBinTopSep17shuffle1_250000filtered.h5'\n # ]]\n # avg_df['Avg Vel NORBNI'] = only_NORBNI.mean(axis=1)\n # avg_df['Avg Vel NORBNI SEM'] = stats.sem(only_NORBNI, axis=1)\n # plt.plot(avg_df['Time'], avg_df['Avg Vel NORBNI'], color='blue', linewidth=1,\n # label='Average Velocity 10mgkg NORBNI +5mgkg U50')\n #\n \"\"\"NORBNI Saline\"\"\"\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C2_F1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F1 10mgkg NORBNI+Saline', line_color='purple')\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C2_F2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F2 10mgkg NORBNI+Saline', line_color='purple')\n # vel_det(file='NORBNI_U50_Ai14_OPRK1_C2_F0_sDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='F0 10mgkg NORBNI+Saline', line_color='violet')\n #\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M1_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M1 10mgkg NORBNI+Saline', line_color='blueviolet')\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M2_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M2 10mgkg NORBNI+Saline', line_color='blueviolet')\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M4_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M4 10mkg NORBNI+Saline', line_color='mediumorchid')\n # vel_det(file='NORBNI_Saline_Ai14_OPRK1_C1_M3_side viewDLC_resnet50_SideViewNov1shuffle1_180000.h5',\n # legend_label='M3 10mgkg NORBNI+Saline', line_color='purple')\n #\n # plt.fill_between(avg_df['Time'], avg_df[\"Avg Vel Saline\"]-avg_df[\"Avg Vel Saline SEM\"],\n # avg_df[\"Avg Vel Saline\"]+avg_df[\"Avg Vel Saline SEM\"], alpha=0.25, facecolor='black', edgecolor='black')\n # plt.fill_between(avg_df['Time'], avg_df[\"Avg Vel Naltr\"]-avg_df[\"Avg Vel Naltr SEM\"],\n # avg_df[\"Avg Vel Naltr\"]+avg_df[\"Avg Vel Naltr SEM\"], alpha=0.25, facecolor='red', edgecolor='red')\n # plt.fill_between(avg_df['Time'], avg_df[\"Avg Vel U50\"]-avg_df[\"Avg Vel U50 SEM\"],\n # avg_df[\"Avg Vel U50\"]+avg_df[\"Avg Vel U50 SEM\"], alpha=0.25, facecolor='orange', edgecolor='orange')\n # plt.fill_between(avg_df['Time'], avg_df[\"Avg Vel NORBNI\"]-avg_df[\"Avg Vel NORBNI SEM\"],\n # avg_df[\"Avg Vel NORBNI\"]+avg_df[\"Avg Vel NORBNI SEM\"], alpha=0.25, facecolor='blue', edgecolor='blue')\n # plt.plot()\n # leg = plt.legend()\n # font = {'family': 'Arial',\n # 'size': 12}\n # plt.rc('font', **font)\n # plt.rc('lines', linewidth = 1)\n # for i in leg.legendHandles:\n # i.set_linewidth(3)\n # plt.xlabel('time (minutes)', fontsize=12)\n # plt.ylabel('pixel', fontsize=12)\n # plt.title('F2 NORBNI, 
NORBNI+U50, Saline Head Inverted Y-coordinate')\n # plt.show()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import sys
import os
import utils
def run(name, dim_k, dump='dump', add_cmd=''):
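    # look up the stored checkpoint for this run name, infer the model class from it,
    # shell out to main.py with the given dump flags, and pause on a non-zero exit code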
res = all_res[name]
model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'
cmd = f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'
print(cmd)
ret = os.system(cmd)
if ret != 0:
input('Error!!!!!!')
all_res = dict(
id_att_3='id_att_3',
id_last='id_last',
c_att_5='c_att_5',
c_last='c_last',
)
def main():
run('id_att_3', 1024, dump='dump')
run('id_last', 1024, dump='dump')
run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')
run('c_last', 256, dump='dump')
run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')
run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')
run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')
run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "548a236c4c485091d312593dcb0fa331ff98f1a8",
"index": 6359,
"step-1": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\n<mask token>\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\nall_res = dict(id_att_3='id_att_3', id_last='id_last', c_att_5='c_att_5',\n c_last='c_last')\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nimport os\nimport utils\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\nall_res = dict(id_att_3='id_att_3', id_last='id_last', c_att_5='c_att_5',\n c_last='c_last')\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import sys\nimport os\nimport utils\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n\n cmd = f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n print(cmd)\n\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\nall_res = dict(\n id_att_3='id_att_3',\n id_last='id_last',\n\n c_att_5='c_att_5',\n c_last='c_last',\n)\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\n\nif __name__ == '__main__':\n main()",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
# Generated by Django 3.0.5 on 2020-04-30 06:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products_app', '0003_auto_20200429_0739'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('email', models.EmailField(max_length=254)),
],
),
migrations.RemoveField(
model_name='item',
name='stock',
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items', models.ManyToManyField(to='products_app.Item')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='products_app.User')),
],
),
]
|
normal
|
{
"blob_id": "cdc8c8aba384b7b1b5e741ffe4309eaee30aaada",
"index": 5405,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('products_app', '0003_auto_20200429_0739')]\n operations = [migrations.CreateModel(name='User', fields=[('id', models\n .AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('name', models.CharField(max_length=100)), (\n 'email', models.EmailField(max_length=254))]), migrations.\n RemoveField(model_name='item', name='stock'), migrations.\n CreateModel(name='Order', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('items', models.ManyToManyField(to='products_app.Item')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to='products_app.User'))])]\n",
"step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('products_app', '0003_auto_20200429_0739')]\n operations = [migrations.CreateModel(name='User', fields=[('id', models\n .AutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('name', models.CharField(max_length=100)), (\n 'email', models.EmailField(max_length=254))]), migrations.\n RemoveField(model_name='item', name='stock'), migrations.\n CreateModel(name='Order', fields=[('id', models.AutoField(\n auto_created=True, primary_key=True, serialize=False, verbose_name=\n 'ID')), ('items', models.ManyToManyField(to='products_app.Item')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.\n CASCADE, to='products_app.User'))])]\n",
"step-5": "# Generated by Django 3.0.5 on 2020-04-30 06:26\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('products_app', '0003_auto_20200429_0739'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='User',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=100)),\n ('email', models.EmailField(max_length=254)),\n ],\n ),\n migrations.RemoveField(\n model_name='item',\n name='stock',\n ),\n migrations.CreateModel(\n name='Order',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('items', models.ManyToManyField(to='products_app.Item')),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='products_app.User')),\n ],\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from distutils.core import setup
setup(
name="zuknuft",
version="0.1",
author="riotbib",
author_email="riotbib@github",
scripts=["zukunft.py"],
install_requires=[
'bottle',
],
)
|
normal
|
{
"blob_id": "638842cda666100ce197437cb354f66de77eb328",
"index": 8065,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='zuknuft', version='0.1', author='riotbib', author_email=\n 'riotbib@github', scripts=['zukunft.py'], install_requires=['bottle'])\n",
"step-3": "from distutils.core import setup\nsetup(name='zuknuft', version='0.1', author='riotbib', author_email=\n 'riotbib@github', scripts=['zukunft.py'], install_requires=['bottle'])\n",
"step-4": "from distutils.core import setup\n\nsetup(\n name=\"zuknuft\",\n version=\"0.1\",\n author=\"riotbib\",\n author_email=\"riotbib@github\",\n scripts=[\"zukunft.py\"],\n install_requires=[\n 'bottle',\n ],\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.screenmanager import ScreenManager, Screen
class Gerenciador(ScreenManager):
pass
class Menu(Screen):
pass
class Tarefas(Screen):
def __init__(self, tarefas=[], **kwargs):
super().__init__(**kwargs)
for tarefa in tarefas:
self.ids.box.add_widget(Tarefa(text=tarefa))
def addWidget(self):
texto = self.ids.texto.text
self.ids.box.add_widget(Tarefa(text=texto))
self.ids.texto.text = ''
class Tarefa(BoxLayout):
def __init__(self, text='', **kwargs):
super().__init__(**kwargs)
self.ids.label.text = text
class Test(App):
def build(self):
return Gerenciador()
Test().run()
|
normal
|
{
"blob_id": "66b42791325a53172d4514cdd16ccd58d4edb186",
"index": 2409,
"step-1": "<mask token>\n\n\nclass Tarefas(Screen):\n <mask token>\n <mask token>\n\n\nclass Tarefa(BoxLayout):\n\n def __init__(self, text='', **kwargs):\n super().__init__(**kwargs)\n self.ids.label.text = text\n\n\nclass Test(App):\n\n def build(self):\n return Gerenciador()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Menu(Screen):\n pass\n\n\nclass Tarefas(Screen):\n\n def __init__(self, tarefas=[], **kwargs):\n super().__init__(**kwargs)\n for tarefa in tarefas:\n self.ids.box.add_widget(Tarefa(text=tarefa))\n\n def addWidget(self):\n texto = self.ids.texto.text\n self.ids.box.add_widget(Tarefa(text=texto))\n self.ids.texto.text = ''\n\n\nclass Tarefa(BoxLayout):\n\n def __init__(self, text='', **kwargs):\n super().__init__(**kwargs)\n self.ids.label.text = text\n\n\nclass Test(App):\n\n def build(self):\n return Gerenciador()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Gerenciador(ScreenManager):\n pass\n\n\nclass Menu(Screen):\n pass\n\n\nclass Tarefas(Screen):\n\n def __init__(self, tarefas=[], **kwargs):\n super().__init__(**kwargs)\n for tarefa in tarefas:\n self.ids.box.add_widget(Tarefa(text=tarefa))\n\n def addWidget(self):\n texto = self.ids.texto.text\n self.ids.box.add_widget(Tarefa(text=texto))\n self.ids.texto.text = ''\n\n\nclass Tarefa(BoxLayout):\n\n def __init__(self, text='', **kwargs):\n super().__init__(**kwargs)\n self.ids.label.text = text\n\n\nclass Test(App):\n\n def build(self):\n return Gerenciador()\n\n\n<mask token>\n",
"step-4": "from kivy.app import App\nfrom kivy.uix.boxlayout import BoxLayout\nfrom kivy.uix.screenmanager import ScreenManager, Screen\n\n\nclass Gerenciador(ScreenManager):\n pass\n\n\nclass Menu(Screen):\n pass\n\n\nclass Tarefas(Screen):\n\n def __init__(self, tarefas=[], **kwargs):\n super().__init__(**kwargs)\n for tarefa in tarefas:\n self.ids.box.add_widget(Tarefa(text=tarefa))\n\n def addWidget(self):\n texto = self.ids.texto.text\n self.ids.box.add_widget(Tarefa(text=texto))\n self.ids.texto.text = ''\n\n\nclass Tarefa(BoxLayout):\n\n def __init__(self, text='', **kwargs):\n super().__init__(**kwargs)\n self.ids.label.text = text\n\n\nclass Test(App):\n\n def build(self):\n return Gerenciador()\n\n\nTest().run()\n",
"step-5": null,
"step-ids": [
5,
8,
9,
11
]
}
|
[
5,
8,
9,
11
] |
import cv2 as cv
img = cv.imread('images/gradient.png', 0)
_,th1 = cv.threshold(img, 127,255, cv.THRESH_BINARY)
_,th2 = cv.threshold(img, 127, 255, cv.THRESH_BINARY_INV)
_,th3 = cv.threshold(img, 127, 255, cv.THRESH_TRUNC) #pixel values above the threshold are truncated (clamped) to the threshold value
_,th4 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO) #pixel values below the threshold become zero
_,th5 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO_INV) #pixel values above the threshold become zero
cv.imshow("Threshold Trunc", th3)
cv.imshow("Threshold2", th2)
cv.imshow("Threshold", th1)
cv.imshow("Image",img)
cv.imshow("th4", th4)
cv.imshow("th5", th5)
cv.waitKey(0)
cv.destroyAllWindows()
|
normal
|
{
"blob_id": "6f356840944e11f52a280262697d7e33b3cca650",
"index": 2319,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncv.imshow('Threshold Trunc', th3)\ncv.imshow('Threshold2', th2)\ncv.imshow('Threshold', th1)\ncv.imshow('Image', img)\ncv.imshow('th4', th4)\ncv.imshow('th5', th5)\ncv.waitKey(0)\ncv.destroyAllWindows()\n",
"step-3": "<mask token>\nimg = cv.imread('images/gradient.png', 0)\n_, th1 = cv.threshold(img, 127, 255, cv.THRESH_BINARY)\n_, th2 = cv.threshold(img, 127, 255, cv.THRESH_BINARY_INV)\n_, th3 = cv.threshold(img, 127, 255, cv.THRESH_TRUNC)\n_, th4 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO)\n_, th5 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO_INV)\ncv.imshow('Threshold Trunc', th3)\ncv.imshow('Threshold2', th2)\ncv.imshow('Threshold', th1)\ncv.imshow('Image', img)\ncv.imshow('th4', th4)\ncv.imshow('th5', th5)\ncv.waitKey(0)\ncv.destroyAllWindows()\n",
"step-4": "import cv2 as cv\nimg = cv.imread('images/gradient.png', 0)\n_, th1 = cv.threshold(img, 127, 255, cv.THRESH_BINARY)\n_, th2 = cv.threshold(img, 127, 255, cv.THRESH_BINARY_INV)\n_, th3 = cv.threshold(img, 127, 255, cv.THRESH_TRUNC)\n_, th4 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO)\n_, th5 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO_INV)\ncv.imshow('Threshold Trunc', th3)\ncv.imshow('Threshold2', th2)\ncv.imshow('Threshold', th1)\ncv.imshow('Image', img)\ncv.imshow('th4', th4)\ncv.imshow('th5', th5)\ncv.waitKey(0)\ncv.destroyAllWindows()\n",
"step-5": "import cv2 as cv\n\nimg = cv.imread('images/gradient.png', 0)\n_,th1 = cv.threshold(img, 127,255, cv.THRESH_BINARY)\n_,th2 = cv.threshold(img, 127, 255, cv.THRESH_BINARY_INV)\n_,th3 = cv.threshold(img, 127, 255, cv.THRESH_TRUNC) #freeze the pixel color after the threshold\n_,th4 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO) #less to threshold will be zero\n_,th5 = cv.threshold(img, 127, 255, cv.THRESH_TOZERO_INV) #if the value of the pixel is greater than threshold it will be zero\n\ncv.imshow(\"Threshold Trunc\", th3)\ncv.imshow(\"Threshold2\", th2)\ncv.imshow(\"Threshold\", th1)\ncv.imshow(\"Image\",img)\ncv.imshow(\"th4\", th4)\ncv.imshow(\"th5\", th5)\n\ncv.waitKey(0)\ncv.destroyAllWindows()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import random
import re
from datetime import datetime, timedelta
from threading import Lock
from telegram.ext import run_async
from src.models.user import UserDB
from src.models.user_stat import UserStat
from src.utils.cache import cache, USER_CACHE_EXPIRE
from src.utils.logger_helpers import get_logger
logger = get_logger(__name__)
class PidorWeekly:
lock = Lock()
re_words = re.compile(
r"\b(ге[йяи]|геев|анал|аналы|аналь\S+|анус|очко|жоп[ау]|жопой|поп[ау]|попой|попк[ау]|попкой|говн[оа]|говном|пенис\S*|член\S*|пизд\S+|гомос\S+|гомик\S*|\S+сексуал\S*|климов\S*|педерас\S+|пидор\S*|пидар\S*|педик\S+|подвор\S+|iphone\S*|айфон\S*|samsung|самсунг\S*|смузи|барбер\S*|рокет\S*|хипстер\S*|лгбт\S*|бабочк\S+|м[ао]к[ао]син\S*|ахтунг\S*|толерант\S+|политкорр?ект\S+|стрижк\S+|бород\S+|аниме\S*|саратов\S*|фемк\S+|\S+изм\S*|dtf|дтф|в[еэ]йп\S*|гироскутер\S*|мизог\S+|козел|козл\S+|муда[кч]\S*|сволоч\S+|ресторан\S*|кача[лт]\S+|мыло|читер\S*|читы?|культур\S+|сра[тл]\S+|насра[тл]\S+|гад\S*|блогг?ер\S*)\b",
re.IGNORECASE)
re_inside = re.compile(r"п[еи]д[оа]р\S*", re.IGNORECASE)
@classmethod
def get_top_pidor(cls, cid, date=None):
monday = cls.__get_current_monday() if date is None else cls.__get_date_monday(date)
db = cls.__get_db(monday, cid)
stats = UserStat.get_chat_stats(cid, date)
        # score everyone by the ratio of flagged words to this participant's total word count
pidor_by_count = {}
for user_stat, user in stats:
count = user_stat.all_messages_count
            # only count users who have written at least 30 messages
if count < 30 or user_stat.words_count < 500:
continue
if user.uid not in db:
continue
pidor_by_count[user.uid] = db[user.uid] / count
if len(pidor_by_count) > 0:
uid, _ = cls.__sort_dict(pidor_by_count)[0]
elif len(stats) == 0:
return None
else:
_, user = random.choice(stats)
uid = user.uid
return uid
@classmethod
@run_async
def parse_message(cls, message):
msg = message.text
if msg is None:
return
uid = message.from_user.id
cid = message.chat_id
entities = message.parse_entities()
if not cls.__has_pidor(msg):
return
cls.__add(uid, cid)
if message.reply_to_message is not None:
to_uid = message.reply_to_message.from_user.id
cls.__add(to_uid, cid, replay=True)
for entity, entity_text in entities.items():
if entity.type == 'mention':
username = entity_text.lstrip('@').strip()
try:
mentioned_user_uid = UserDB.get_uid_by_username(username)
if mentioned_user_uid:
cls.__add(mentioned_user_uid, cid, replay=True)
except Exception:
pass
continue
if entity.type == 'text_mention':
cls.__add(entity.user.id, cid, replay=True)
continue
@classmethod
def __has_pidor(cls, msg):
msg_lower = msg.lower().replace('ё', 'е')
if cls.re_words.search(msg_lower):
return True
if cls.re_inside.search(msg_lower):
return True
return False
@classmethod
def __add(cls, uid, cid, date=None, replay=False):
monday = cls.__get_current_monday() if date is None else cls.__get_date_monday(date)
logger.debug(f'lock {cid}:{uid}')
with cls.lock:
db = cls.__get_db(monday, cid)
value = 1
if replay is True:
value = 0.4
if uid in db:
db[uid] += value
else:
db[uid] = value
cls.__set_db(db, monday, cid)
@staticmethod
def __sort_dict(d):
return sorted(d.items(), key=lambda x: x[1], reverse=True)
@staticmethod
def __get_cache_key(monday, cid):
return f'pidorweekly:{monday.strftime("%Y%m%d")}:{cid}'
@staticmethod
def __get_date_monday(date):
monday = date - timedelta(days=date.weekday())
return monday.replace(hour=0, minute=0, second=0, microsecond=0)
@classmethod
def __get_current_monday(cls):
return cls.__get_date_monday(datetime.today())
@classmethod
def __get_db(cls, monday, cid):
cached = cache.get(cls.__get_cache_key(monday, cid))
if cached:
return cached
return {}
@classmethod
def __set_db(cls, newdb, monday, cid):
cache.set(cls.__get_cache_key(monday, cid), newdb, time=USER_CACHE_EXPIRE)
|
normal
|
{
"blob_id": "109ca06685eece74034f77a98b1d7172a17aca21",
"index": 7469,
"step-1": "<mask token>\n\n\nclass PidorWeekly:\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def get_top_pidor(cls, cid, date=None):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n db = cls.__get_db(monday, cid)\n stats = UserStat.get_chat_stats(cid, date)\n pidor_by_count = {}\n for user_stat, user in stats:\n count = user_stat.all_messages_count\n if count < 30 or user_stat.words_count < 500:\n continue\n if user.uid not in db:\n continue\n pidor_by_count[user.uid] = db[user.uid] / count\n if len(pidor_by_count) > 0:\n uid, _ = cls.__sort_dict(pidor_by_count)[0]\n elif len(stats) == 0:\n return None\n else:\n _, user = random.choice(stats)\n uid = user.uid\n return uid\n\n @classmethod\n @run_async\n def parse_message(cls, message):\n msg = message.text\n if msg is None:\n return\n uid = message.from_user.id\n cid = message.chat_id\n entities = message.parse_entities()\n if not cls.__has_pidor(msg):\n return\n cls.__add(uid, cid)\n if message.reply_to_message is not None:\n to_uid = message.reply_to_message.from_user.id\n cls.__add(to_uid, cid, replay=True)\n for entity, entity_text in entities.items():\n if entity.type == 'mention':\n username = entity_text.lstrip('@').strip()\n try:\n mentioned_user_uid = UserDB.get_uid_by_username(username)\n if mentioned_user_uid:\n cls.__add(mentioned_user_uid, cid, replay=True)\n except Exception:\n pass\n continue\n if entity.type == 'text_mention':\n cls.__add(entity.user.id, cid, replay=True)\n continue\n <mask token>\n\n @classmethod\n def __add(cls, uid, cid, date=None, replay=False):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n logger.debug(f'lock {cid}:{uid}')\n with cls.lock:\n db = cls.__get_db(monday, cid)\n value = 1\n if replay is True:\n value = 0.4\n if uid in db:\n db[uid] += value\n else:\n db[uid] = value\n cls.__set_db(db, monday, cid)\n\n @staticmethod\n def __sort_dict(d):\n return sorted(d.items(), key=lambda x: x[1], reverse=True)\n\n @staticmethod\n def __get_cache_key(monday, cid):\n return f\"pidorweekly:{monday.strftime('%Y%m%d')}:{cid}\"\n\n @staticmethod\n def __get_date_monday(date):\n monday = date - timedelta(days=date.weekday())\n return monday.replace(hour=0, minute=0, second=0, microsecond=0)\n\n @classmethod\n def __get_current_monday(cls):\n return cls.__get_date_monday(datetime.today())\n <mask token>\n\n @classmethod\n def __set_db(cls, newdb, monday, cid):\n cache.set(cls.__get_cache_key(monday, cid), newdb, time=\n USER_CACHE_EXPIRE)\n",
"step-2": "<mask token>\n\n\nclass PidorWeekly:\n lock = Lock()\n re_words = re.compile(\n '\\\\b(ге[йяи]|геев|анал|аналы|аналь\\\\S+|анус|очко|жоп[ау]|жопой|поп[ау]|попой|попк[ау]|попкой|говн[оа]|говном|пенис\\\\S*|член\\\\S*|пизд\\\\S+|гомос\\\\S+|гомик\\\\S*|\\\\S+сексуал\\\\S*|климов\\\\S*|педерас\\\\S+|пидор\\\\S*|пидар\\\\S*|педик\\\\S+|подвор\\\\S+|iphone\\\\S*|айфон\\\\S*|samsung|самсунг\\\\S*|смузи|барбер\\\\S*|рокет\\\\S*|хипстер\\\\S*|лгбт\\\\S*|бабочк\\\\S+|м[ао]к[ао]син\\\\S*|ахтунг\\\\S*|толерант\\\\S+|политкорр?ект\\\\S+|стрижк\\\\S+|бород\\\\S+|аниме\\\\S*|саратов\\\\S*|фемк\\\\S+|\\\\S+изм\\\\S*|dtf|дтф|в[еэ]йп\\\\S*|гироскутер\\\\S*|мизог\\\\S+|козел|козл\\\\S+|муда[кч]\\\\S*|сволоч\\\\S+|ресторан\\\\S*|кача[лт]\\\\S+|мыло|читер\\\\S*|читы?|культур\\\\S+|сра[тл]\\\\S+|насра[тл]\\\\S+|гад\\\\S*|блогг?ер\\\\S*)\\\\b'\n , re.IGNORECASE)\n re_inside = re.compile('п[еи]д[оа]р\\\\S*', re.IGNORECASE)\n\n @classmethod\n def get_top_pidor(cls, cid, date=None):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n db = cls.__get_db(monday, cid)\n stats = UserStat.get_chat_stats(cid, date)\n pidor_by_count = {}\n for user_stat, user in stats:\n count = user_stat.all_messages_count\n if count < 30 or user_stat.words_count < 500:\n continue\n if user.uid not in db:\n continue\n pidor_by_count[user.uid] = db[user.uid] / count\n if len(pidor_by_count) > 0:\n uid, _ = cls.__sort_dict(pidor_by_count)[0]\n elif len(stats) == 0:\n return None\n else:\n _, user = random.choice(stats)\n uid = user.uid\n return uid\n\n @classmethod\n @run_async\n def parse_message(cls, message):\n msg = message.text\n if msg is None:\n return\n uid = message.from_user.id\n cid = message.chat_id\n entities = message.parse_entities()\n if not cls.__has_pidor(msg):\n return\n cls.__add(uid, cid)\n if message.reply_to_message is not None:\n to_uid = message.reply_to_message.from_user.id\n cls.__add(to_uid, cid, replay=True)\n for entity, entity_text in entities.items():\n if entity.type == 'mention':\n username = entity_text.lstrip('@').strip()\n try:\n mentioned_user_uid = UserDB.get_uid_by_username(username)\n if mentioned_user_uid:\n cls.__add(mentioned_user_uid, cid, replay=True)\n except Exception:\n pass\n continue\n if entity.type == 'text_mention':\n cls.__add(entity.user.id, cid, replay=True)\n continue\n\n @classmethod\n def __has_pidor(cls, msg):\n msg_lower = msg.lower().replace('ё', 'е')\n if cls.re_words.search(msg_lower):\n return True\n if cls.re_inside.search(msg_lower):\n return True\n return False\n\n @classmethod\n def __add(cls, uid, cid, date=None, replay=False):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n logger.debug(f'lock {cid}:{uid}')\n with cls.lock:\n db = cls.__get_db(monday, cid)\n value = 1\n if replay is True:\n value = 0.4\n if uid in db:\n db[uid] += value\n else:\n db[uid] = value\n cls.__set_db(db, monday, cid)\n\n @staticmethod\n def __sort_dict(d):\n return sorted(d.items(), key=lambda x: x[1], reverse=True)\n\n @staticmethod\n def __get_cache_key(monday, cid):\n return f\"pidorweekly:{monday.strftime('%Y%m%d')}:{cid}\"\n\n @staticmethod\n def __get_date_monday(date):\n monday = date - timedelta(days=date.weekday())\n return monday.replace(hour=0, minute=0, second=0, microsecond=0)\n\n @classmethod\n def __get_current_monday(cls):\n return cls.__get_date_monday(datetime.today())\n\n @classmethod\n def __get_db(cls, monday, cid):\n cached = cache.get(cls.__get_cache_key(monday, cid))\n if 
cached:\n return cached\n return {}\n\n @classmethod\n def __set_db(cls, newdb, monday, cid):\n cache.set(cls.__get_cache_key(monday, cid), newdb, time=\n USER_CACHE_EXPIRE)\n",
"step-3": "<mask token>\nlogger = get_logger(__name__)\n\n\nclass PidorWeekly:\n lock = Lock()\n re_words = re.compile(\n '\\\\b(ге[йяи]|геев|анал|аналы|аналь\\\\S+|анус|очко|жоп[ау]|жопой|поп[ау]|попой|попк[ау]|попкой|говн[оа]|говном|пенис\\\\S*|член\\\\S*|пизд\\\\S+|гомос\\\\S+|гомик\\\\S*|\\\\S+сексуал\\\\S*|климов\\\\S*|педерас\\\\S+|пидор\\\\S*|пидар\\\\S*|педик\\\\S+|подвор\\\\S+|iphone\\\\S*|айфон\\\\S*|samsung|самсунг\\\\S*|смузи|барбер\\\\S*|рокет\\\\S*|хипстер\\\\S*|лгбт\\\\S*|бабочк\\\\S+|м[ао]к[ао]син\\\\S*|ахтунг\\\\S*|толерант\\\\S+|политкорр?ект\\\\S+|стрижк\\\\S+|бород\\\\S+|аниме\\\\S*|саратов\\\\S*|фемк\\\\S+|\\\\S+изм\\\\S*|dtf|дтф|в[еэ]йп\\\\S*|гироскутер\\\\S*|мизог\\\\S+|козел|козл\\\\S+|муда[кч]\\\\S*|сволоч\\\\S+|ресторан\\\\S*|кача[лт]\\\\S+|мыло|читер\\\\S*|читы?|культур\\\\S+|сра[тл]\\\\S+|насра[тл]\\\\S+|гад\\\\S*|блогг?ер\\\\S*)\\\\b'\n , re.IGNORECASE)\n re_inside = re.compile('п[еи]д[оа]р\\\\S*', re.IGNORECASE)\n\n @classmethod\n def get_top_pidor(cls, cid, date=None):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n db = cls.__get_db(monday, cid)\n stats = UserStat.get_chat_stats(cid, date)\n pidor_by_count = {}\n for user_stat, user in stats:\n count = user_stat.all_messages_count\n if count < 30 or user_stat.words_count < 500:\n continue\n if user.uid not in db:\n continue\n pidor_by_count[user.uid] = db[user.uid] / count\n if len(pidor_by_count) > 0:\n uid, _ = cls.__sort_dict(pidor_by_count)[0]\n elif len(stats) == 0:\n return None\n else:\n _, user = random.choice(stats)\n uid = user.uid\n return uid\n\n @classmethod\n @run_async\n def parse_message(cls, message):\n msg = message.text\n if msg is None:\n return\n uid = message.from_user.id\n cid = message.chat_id\n entities = message.parse_entities()\n if not cls.__has_pidor(msg):\n return\n cls.__add(uid, cid)\n if message.reply_to_message is not None:\n to_uid = message.reply_to_message.from_user.id\n cls.__add(to_uid, cid, replay=True)\n for entity, entity_text in entities.items():\n if entity.type == 'mention':\n username = entity_text.lstrip('@').strip()\n try:\n mentioned_user_uid = UserDB.get_uid_by_username(username)\n if mentioned_user_uid:\n cls.__add(mentioned_user_uid, cid, replay=True)\n except Exception:\n pass\n continue\n if entity.type == 'text_mention':\n cls.__add(entity.user.id, cid, replay=True)\n continue\n\n @classmethod\n def __has_pidor(cls, msg):\n msg_lower = msg.lower().replace('ё', 'е')\n if cls.re_words.search(msg_lower):\n return True\n if cls.re_inside.search(msg_lower):\n return True\n return False\n\n @classmethod\n def __add(cls, uid, cid, date=None, replay=False):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n logger.debug(f'lock {cid}:{uid}')\n with cls.lock:\n db = cls.__get_db(monday, cid)\n value = 1\n if replay is True:\n value = 0.4\n if uid in db:\n db[uid] += value\n else:\n db[uid] = value\n cls.__set_db(db, monday, cid)\n\n @staticmethod\n def __sort_dict(d):\n return sorted(d.items(), key=lambda x: x[1], reverse=True)\n\n @staticmethod\n def __get_cache_key(monday, cid):\n return f\"pidorweekly:{monday.strftime('%Y%m%d')}:{cid}\"\n\n @staticmethod\n def __get_date_monday(date):\n monday = date - timedelta(days=date.weekday())\n return monday.replace(hour=0, minute=0, second=0, microsecond=0)\n\n @classmethod\n def __get_current_monday(cls):\n return cls.__get_date_monday(datetime.today())\n\n @classmethod\n def __get_db(cls, monday, cid):\n cached = 
cache.get(cls.__get_cache_key(monday, cid))\n if cached:\n return cached\n return {}\n\n @classmethod\n def __set_db(cls, newdb, monday, cid):\n cache.set(cls.__get_cache_key(monday, cid), newdb, time=\n USER_CACHE_EXPIRE)\n",
"step-4": "import random\nimport re\nfrom datetime import datetime, timedelta\nfrom threading import Lock\nfrom telegram.ext import run_async\nfrom src.models.user import UserDB\nfrom src.models.user_stat import UserStat\nfrom src.utils.cache import cache, USER_CACHE_EXPIRE\nfrom src.utils.logger_helpers import get_logger\nlogger = get_logger(__name__)\n\n\nclass PidorWeekly:\n lock = Lock()\n re_words = re.compile(\n '\\\\b(ге[йяи]|геев|анал|аналы|аналь\\\\S+|анус|очко|жоп[ау]|жопой|поп[ау]|попой|попк[ау]|попкой|говн[оа]|говном|пенис\\\\S*|член\\\\S*|пизд\\\\S+|гомос\\\\S+|гомик\\\\S*|\\\\S+сексуал\\\\S*|климов\\\\S*|педерас\\\\S+|пидор\\\\S*|пидар\\\\S*|педик\\\\S+|подвор\\\\S+|iphone\\\\S*|айфон\\\\S*|samsung|самсунг\\\\S*|смузи|барбер\\\\S*|рокет\\\\S*|хипстер\\\\S*|лгбт\\\\S*|бабочк\\\\S+|м[ао]к[ао]син\\\\S*|ахтунг\\\\S*|толерант\\\\S+|политкорр?ект\\\\S+|стрижк\\\\S+|бород\\\\S+|аниме\\\\S*|саратов\\\\S*|фемк\\\\S+|\\\\S+изм\\\\S*|dtf|дтф|в[еэ]йп\\\\S*|гироскутер\\\\S*|мизог\\\\S+|козел|козл\\\\S+|муда[кч]\\\\S*|сволоч\\\\S+|ресторан\\\\S*|кача[лт]\\\\S+|мыло|читер\\\\S*|читы?|культур\\\\S+|сра[тл]\\\\S+|насра[тл]\\\\S+|гад\\\\S*|блогг?ер\\\\S*)\\\\b'\n , re.IGNORECASE)\n re_inside = re.compile('п[еи]д[оа]р\\\\S*', re.IGNORECASE)\n\n @classmethod\n def get_top_pidor(cls, cid, date=None):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n db = cls.__get_db(monday, cid)\n stats = UserStat.get_chat_stats(cid, date)\n pidor_by_count = {}\n for user_stat, user in stats:\n count = user_stat.all_messages_count\n if count < 30 or user_stat.words_count < 500:\n continue\n if user.uid not in db:\n continue\n pidor_by_count[user.uid] = db[user.uid] / count\n if len(pidor_by_count) > 0:\n uid, _ = cls.__sort_dict(pidor_by_count)[0]\n elif len(stats) == 0:\n return None\n else:\n _, user = random.choice(stats)\n uid = user.uid\n return uid\n\n @classmethod\n @run_async\n def parse_message(cls, message):\n msg = message.text\n if msg is None:\n return\n uid = message.from_user.id\n cid = message.chat_id\n entities = message.parse_entities()\n if not cls.__has_pidor(msg):\n return\n cls.__add(uid, cid)\n if message.reply_to_message is not None:\n to_uid = message.reply_to_message.from_user.id\n cls.__add(to_uid, cid, replay=True)\n for entity, entity_text in entities.items():\n if entity.type == 'mention':\n username = entity_text.lstrip('@').strip()\n try:\n mentioned_user_uid = UserDB.get_uid_by_username(username)\n if mentioned_user_uid:\n cls.__add(mentioned_user_uid, cid, replay=True)\n except Exception:\n pass\n continue\n if entity.type == 'text_mention':\n cls.__add(entity.user.id, cid, replay=True)\n continue\n\n @classmethod\n def __has_pidor(cls, msg):\n msg_lower = msg.lower().replace('ё', 'е')\n if cls.re_words.search(msg_lower):\n return True\n if cls.re_inside.search(msg_lower):\n return True\n return False\n\n @classmethod\n def __add(cls, uid, cid, date=None, replay=False):\n monday = cls.__get_current_monday(\n ) if date is None else cls.__get_date_monday(date)\n logger.debug(f'lock {cid}:{uid}')\n with cls.lock:\n db = cls.__get_db(monday, cid)\n value = 1\n if replay is True:\n value = 0.4\n if uid in db:\n db[uid] += value\n else:\n db[uid] = value\n cls.__set_db(db, monday, cid)\n\n @staticmethod\n def __sort_dict(d):\n return sorted(d.items(), key=lambda x: x[1], reverse=True)\n\n @staticmethod\n def __get_cache_key(monday, cid):\n return f\"pidorweekly:{monday.strftime('%Y%m%d')}:{cid}\"\n\n @staticmethod\n def 
__get_date_monday(date):\n monday = date - timedelta(days=date.weekday())\n return monday.replace(hour=0, minute=0, second=0, microsecond=0)\n\n @classmethod\n def __get_current_monday(cls):\n return cls.__get_date_monday(datetime.today())\n\n @classmethod\n def __get_db(cls, monday, cid):\n cached = cache.get(cls.__get_cache_key(monday, cid))\n if cached:\n return cached\n return {}\n\n @classmethod\n def __set_db(cls, newdb, monday, cid):\n cache.set(cls.__get_cache_key(monday, cid), newdb, time=\n USER_CACHE_EXPIRE)\n",
"step-5": "import random\nimport re\nfrom datetime import datetime, timedelta\nfrom threading import Lock\n\nfrom telegram.ext import run_async\n\nfrom src.models.user import UserDB\nfrom src.models.user_stat import UserStat\nfrom src.utils.cache import cache, USER_CACHE_EXPIRE\nfrom src.utils.logger_helpers import get_logger\n\nlogger = get_logger(__name__)\n\n\nclass PidorWeekly:\n lock = Lock()\n re_words = re.compile(\n r\"\\b(ге[йяи]|геев|анал|аналы|аналь\\S+|анус|очко|жоп[ау]|жопой|поп[ау]|попой|попк[ау]|попкой|говн[оа]|говном|пенис\\S*|член\\S*|пизд\\S+|гомос\\S+|гомик\\S*|\\S+сексуал\\S*|климов\\S*|педерас\\S+|пидор\\S*|пидар\\S*|педик\\S+|подвор\\S+|iphone\\S*|айфон\\S*|samsung|самсунг\\S*|смузи|барбер\\S*|рокет\\S*|хипстер\\S*|лгбт\\S*|бабочк\\S+|м[ао]к[ао]син\\S*|ахтунг\\S*|толерант\\S+|политкорр?ект\\S+|стрижк\\S+|бород\\S+|аниме\\S*|саратов\\S*|фемк\\S+|\\S+изм\\S*|dtf|дтф|в[еэ]йп\\S*|гироскутер\\S*|мизог\\S+|козел|козл\\S+|муда[кч]\\S*|сволоч\\S+|ресторан\\S*|кача[лт]\\S+|мыло|читер\\S*|читы?|культур\\S+|сра[тл]\\S+|насра[тл]\\S+|гад\\S*|блогг?ер\\S*)\\b\",\n re.IGNORECASE)\n re_inside = re.compile(r\"п[еи]д[оа]р\\S*\", re.IGNORECASE)\n\n @classmethod\n def get_top_pidor(cls, cid, date=None):\n monday = cls.__get_current_monday() if date is None else cls.__get_date_monday(date)\n db = cls.__get_db(monday, cid)\n stats = UserStat.get_chat_stats(cid, date)\n\n # подсчитаем всех по отношению пидор-слов к общему количеству слов этого участника\n pidor_by_count = {}\n for user_stat, user in stats:\n count = user_stat.all_messages_count\n # учитываем только тек, кто написал от 30 сообщений\n if count < 30 or user_stat.words_count < 500:\n continue\n if user.uid not in db:\n continue\n pidor_by_count[user.uid] = db[user.uid] / count\n\n if len(pidor_by_count) > 0:\n uid, _ = cls.__sort_dict(pidor_by_count)[0]\n elif len(stats) == 0:\n return None\n else:\n _, user = random.choice(stats)\n uid = user.uid\n return uid\n\n @classmethod\n @run_async\n def parse_message(cls, message):\n msg = message.text\n if msg is None:\n return\n uid = message.from_user.id\n cid = message.chat_id\n entities = message.parse_entities()\n\n if not cls.__has_pidor(msg):\n return\n cls.__add(uid, cid)\n\n if message.reply_to_message is not None:\n to_uid = message.reply_to_message.from_user.id\n cls.__add(to_uid, cid, replay=True)\n\n for entity, entity_text in entities.items():\n if entity.type == 'mention':\n username = entity_text.lstrip('@').strip()\n try:\n mentioned_user_uid = UserDB.get_uid_by_username(username)\n if mentioned_user_uid:\n cls.__add(mentioned_user_uid, cid, replay=True)\n except Exception:\n pass\n continue\n if entity.type == 'text_mention':\n cls.__add(entity.user.id, cid, replay=True)\n continue\n\n @classmethod\n def __has_pidor(cls, msg):\n msg_lower = msg.lower().replace('ё', 'е')\n if cls.re_words.search(msg_lower):\n return True\n if cls.re_inside.search(msg_lower):\n return True\n return False\n\n @classmethod\n def __add(cls, uid, cid, date=None, replay=False):\n monday = cls.__get_current_monday() if date is None else cls.__get_date_monday(date)\n logger.debug(f'lock {cid}:{uid}')\n with cls.lock:\n db = cls.__get_db(monday, cid)\n value = 1\n if replay is True:\n value = 0.4\n\n if uid in db:\n db[uid] += value\n else:\n db[uid] = value\n\n cls.__set_db(db, monday, cid)\n\n @staticmethod\n def __sort_dict(d):\n return sorted(d.items(), key=lambda x: x[1], reverse=True)\n\n @staticmethod\n def __get_cache_key(monday, cid):\n return 
f'pidorweekly:{monday.strftime(\"%Y%m%d\")}:{cid}'\n\n @staticmethod\n def __get_date_monday(date):\n monday = date - timedelta(days=date.weekday())\n return monday.replace(hour=0, minute=0, second=0, microsecond=0)\n\n @classmethod\n def __get_current_monday(cls):\n return cls.__get_date_monday(datetime.today())\n\n @classmethod\n def __get_db(cls, monday, cid):\n cached = cache.get(cls.__get_cache_key(monday, cid))\n if cached:\n return cached\n return {}\n\n @classmethod\n def __set_db(cls, newdb, monday, cid):\n cache.set(cls.__get_cache_key(monday, cid), newdb, time=USER_CACHE_EXPIRE)\n",
"step-ids": [
9,
12,
13,
14,
15
]
}
|
[
9,
12,
13,
14,
15
] |
print ("Hello"*5)
|
normal
|
{
"blob_id": "9ae7b6d081529a5c70b7362c852647b3638e7e98",
"index": 8105,
"step-1": "<mask token>\n",
"step-2": "print('Hello' * 5)\n",
"step-3": "print (\"Hello\"*5)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy.settings import CrawlerSettings
from scrapy import log, signals
from spiders.songspk_spider import SongsPKSpider
from scrapy.xlib.pydispatch import dispatcher
def stop_reactor():
reactor.stop()
dispatcher.connect(stop_reactor, signal=signals.spider_closed)
spider = SongsPKSpider(domain='aqaq.com')
crawler = Crawler(CrawlerSettings())
crawler.configure()
crawler.crawl(spider)
crawler.start()
log.start(loglevel=log.DEBUG)
log.msg("------------>Running reactor")
result = reactor.run()
print result
log.msg("------------>Running stoped")
|
normal
|
{
"blob_id": "0d14534b210b13ede4a687e418d05d756d221950",
"index": 3297,
"step-1": "from twisted.internet import reactor\nfrom scrapy.crawler import Crawler\nfrom scrapy.settings import CrawlerSettings\nfrom scrapy import log, signals\nfrom spiders.songspk_spider import SongsPKSpider\nfrom scrapy.xlib.pydispatch import dispatcher\n\ndef stop_reactor():\n reactor.stop()\n\ndispatcher.connect(stop_reactor, signal=signals.spider_closed)\n\nspider = SongsPKSpider(domain='aqaq.com')\ncrawler = Crawler(CrawlerSettings())\ncrawler.configure()\ncrawler.crawl(spider)\ncrawler.start()\nlog.start(loglevel=log.DEBUG)\nlog.msg(\"------------>Running reactor\")\nresult = reactor.run()\nprint result\nlog.msg(\"------------>Running stoped\")\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pandas as pd
import numpy as np
import sys
#Best Mean Test
if len(sys.argv) <= 3:
print("Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>")
print("ex: best-mean.py testdata.csv nicdrop 95000")
print("<rv> is response variable")
exit()
target_to_beat = int(sys.argv[3]) #factors
rv = sys.argv[2].split(',')
data = pd.read_csv(sys.argv[1], header=[0,1])
response_var = data[[rv[0],'factors']]
response_var.columns = response_var.columns.get_level_values(1)
print("Re-run factor means")
print(response_var.groupby('code')[rv[1]].mean())
print("Lowest observed sample mean (target to beat)")
print(response_var.groupby('code')[rv[1]].mean().min())
#print factors still remaining as viable
candidiate_factors_index = response_var.groupby('code')[rv[1]].mean().index.array.to_numpy() #all factors from csv
improved_factors_bools = (response_var.groupby('code')[rv[1]].mean() < target_to_beat).to_numpy() #boolean series
all = ""
i=0
for y in candidiate_factors_index:
if improved_factors_bools[i]:
all = all + y + ","
i=i+1
print("Effects")
if len(all) == 0:
print("NONE")
exit()
print(all.rstrip(','))
|
normal
|
{
"blob_id": "b9e78629fe094d933fdc0ffa2f9d9d1880e78c12",
"index": 9078,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif len(sys.argv) <= 3:\n print('Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>')\n print('ex: best-mean.py testdata.csv nicdrop 95000')\n print('<rv> is response variable')\n exit()\n<mask token>\nprint('Re-run factor means')\nprint(response_var.groupby('code')[rv[1]].mean())\nprint('Lowest observed sample mean (target to beat)')\nprint(response_var.groupby('code')[rv[1]].mean().min())\n<mask token>\nfor y in candidiate_factors_index:\n if improved_factors_bools[i]:\n all = all + y + ','\n i = i + 1\nprint('Effects')\nif len(all) == 0:\n print('NONE')\n exit()\nprint(all.rstrip(','))\n",
"step-3": "<mask token>\nif len(sys.argv) <= 3:\n print('Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>')\n print('ex: best-mean.py testdata.csv nicdrop 95000')\n print('<rv> is response variable')\n exit()\ntarget_to_beat = int(sys.argv[3])\nrv = sys.argv[2].split(',')\ndata = pd.read_csv(sys.argv[1], header=[0, 1])\nresponse_var = data[[rv[0], 'factors']]\nresponse_var.columns = response_var.columns.get_level_values(1)\nprint('Re-run factor means')\nprint(response_var.groupby('code')[rv[1]].mean())\nprint('Lowest observed sample mean (target to beat)')\nprint(response_var.groupby('code')[rv[1]].mean().min())\ncandidiate_factors_index = response_var.groupby('code')[rv[1]].mean(\n ).index.array.to_numpy()\nimproved_factors_bools = (response_var.groupby('code')[rv[1]].mean() <\n target_to_beat).to_numpy()\nall = ''\ni = 0\nfor y in candidiate_factors_index:\n if improved_factors_bools[i]:\n all = all + y + ','\n i = i + 1\nprint('Effects')\nif len(all) == 0:\n print('NONE')\n exit()\nprint(all.rstrip(','))\n",
"step-4": "import pandas as pd\nimport numpy as np\nimport sys\nif len(sys.argv) <= 3:\n print('Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>')\n print('ex: best-mean.py testdata.csv nicdrop 95000')\n print('<rv> is response variable')\n exit()\ntarget_to_beat = int(sys.argv[3])\nrv = sys.argv[2].split(',')\ndata = pd.read_csv(sys.argv[1], header=[0, 1])\nresponse_var = data[[rv[0], 'factors']]\nresponse_var.columns = response_var.columns.get_level_values(1)\nprint('Re-run factor means')\nprint(response_var.groupby('code')[rv[1]].mean())\nprint('Lowest observed sample mean (target to beat)')\nprint(response_var.groupby('code')[rv[1]].mean().min())\ncandidiate_factors_index = response_var.groupby('code')[rv[1]].mean(\n ).index.array.to_numpy()\nimproved_factors_bools = (response_var.groupby('code')[rv[1]].mean() <\n target_to_beat).to_numpy()\nall = ''\ni = 0\nfor y in candidiate_factors_index:\n if improved_factors_bools[i]:\n all = all + y + ','\n i = i + 1\nprint('Effects')\nif len(all) == 0:\n print('NONE')\n exit()\nprint(all.rstrip(','))\n",
"step-5": "import pandas as pd\nimport numpy as np\nimport sys\n\n#Best Mean Test\nif len(sys.argv) <= 3:\n\tprint(\"Not enough args usage: anova.py <*.csv> <rv1,rv2> <target to beat>\")\n\tprint(\"ex: best-mean.py testdata.csv nicdrop 95000\")\n\tprint(\"<rv> is response variable\")\n\texit()\n\ntarget_to_beat = int(sys.argv[3]) #factors\nrv = sys.argv[2].split(',')\n\ndata = pd.read_csv(sys.argv[1], header=[0,1])\nresponse_var = data[[rv[0],'factors']]\nresponse_var.columns = response_var.columns.get_level_values(1)\n\nprint(\"Re-run factor means\")\nprint(response_var.groupby('code')[rv[1]].mean())\n\nprint(\"Lowest observed sample mean (target to beat)\")\nprint(response_var.groupby('code')[rv[1]].mean().min())\n\n#print factors still remaining as viable\ncandidiate_factors_index = response_var.groupby('code')[rv[1]].mean().index.array.to_numpy() #all factors from csv\nimproved_factors_bools = (response_var.groupby('code')[rv[1]].mean() < target_to_beat).to_numpy() #boolean series\nall = \"\"\ni=0\nfor y in candidiate_factors_index:\n\tif improved_factors_bools[i]:\n\t\tall = all + y + \",\"\n\ti=i+1\nprint(\"Effects\")\nif len(all) == 0:\n\tprint(\"NONE\")\n\texit()\nprint(all.rstrip(','))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from hops import constants
class Cluster(object):
"""
Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore
"""
def __init__(self, cluster_json):
"""
Initialize the cluster object from JSON payload
Args:
:cluster_json: JSON data of the cluster
"""
self.datapoint_name = cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]
self.cluster = int(cluster_json[constants.REST_CONFIG.
JSON_CLUSTERING_ANALYSIS_CLUSTER])
|
normal
|
{
"blob_id": "753c87a3d22aeca1001eb770831b846b175d873e",
"index": 9139,
"step-1": "<mask token>\n\n\nclass Cluster(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Cluster(object):\n <mask token>\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-3": "<mask token>\n\n\nclass Cluster(object):\n \"\"\"\n Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore\n \"\"\"\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-4": "from hops import constants\n\n\nclass Cluster(object):\n \"\"\"\n Represents a Cluster in Cluster Analysis computed for a featuregroup or training dataset in the featurestore\n \"\"\"\n\n def __init__(self, cluster_json):\n \"\"\"\n Initialize the cluster object from JSON payload\n\n Args:\n :cluster_json: JSON data of the cluster\n \"\"\"\n self.datapoint_name = cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_DATA_POINT_NAME]\n self.cluster = int(cluster_json[constants.REST_CONFIG.\n JSON_CLUSTERING_ANALYSIS_CLUSTER])\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
def cubarea(l2,b2,h2):
print("Area of cuboid =",2*(l2+b2+h2))
def cubperimeter(l2,b2,h2):
print("Perimeter of cuboid =",4*(l2+b2+h2))
|
normal
|
{
"blob_id": "45a85ff765833fd62fc1670404d8994818788707",
"index": 6873,
"step-1": "<mask token>\n",
"step-2": "def cubarea(l2, b2, h2):\n print('Area of cuboid =', 2 * (l2 + b2 + h2))\n\n\n<mask token>\n",
"step-3": "def cubarea(l2, b2, h2):\n print('Area of cuboid =', 2 * (l2 + b2 + h2))\n\n\ndef cubperimeter(l2, b2, h2):\n print('Perimeter of cuboid =', 4 * (l2 + b2 + h2))\n",
"step-4": "def cubarea(l2,b2,h2):\n print(\"Area of cuboid =\",2*(l2+b2+h2))\ndef cubperimeter(l2,b2,h2):\n print(\"Perimeter of cuboid =\",4*(l2+b2+h2)) \n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import math
import random
import pygame
pygame.init()
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
clock = pygame.time.Clock()
pygame.display.set_caption('space invaders')
background = pygame.image.load('background.png')
score = 0
previous_score = 0
score_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 32)
textX = 10
testY = 10
# intro
intro = True
intro_text = "SpaceInvaders"
intro_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)
intro_font2 = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)
# PlayButton
play_button = pygame.image.load('play-button.png')
play_button_X = (SCREEN_WIDTH / 2) - play_button.get_width()
play_button_Y = (SCREEN_HEIGHT / (4 / 3)) - play_button.get_height()
# GameOver
gameover = False
gameover_text = "Game Over"
replay_button = pygame.image.load('replay.png')
# player
player_image = pygame.image.load('spaceship.png')
player_X = 370
player_Y = 480
player_movement = 0
# bullet
bullet_image = pygame.image.load('hot.png')
bullet_X = []
bullet_Y = []
bullet_movement = 0.7
bullet_fired = []
num_bullet = 1
for i in range(num_bullet):
bullet_X.append(0)
bullet_Y.append(player_Y)
bullet_fired.append(False)
# enemy
enemy_image = pygame.image.load('ufo.png')
enemy_X = []
enemy_Y = []
enemy_X_movement = []
enemy_Y_movement = 40
num_enemies = 2
# gamespeedincrement
gamespeed = 0
gamespeed_increment = 0.05
for i in range(num_enemies):
enemy_X.append(random.randint(0, 736))
enemy_Y.append(random.randint(50, 150))
enemy_X_movement.append(0.2)
def player(x, y):
screen.blit(player_image, (x, y))
def fire_bullet(x, y, n):
global bullet_fired
bullet_fired[n] = True
screen.blit(bullet_image, (x + 16, y + 10))
def add_bullet():
global num_bullet
num_bullet += 1
bullet_X.append(0)
bullet_Y.append(player_Y)
bullet_fired.append(False)
def spawn_enemy(x, y):
screen.blit(enemy_image, (x, y))
def add_enemy():
global num_enemies
enemy_X.append(random.randint(0, 736))
enemy_Y.append(random.randint(50, 150))
enemy_X_movement.append(0.2)
num_enemies += 1
def reset_enemy(index):
enemy_X[index] = random.randint(0, 736)
enemy_Y[index] = random.randint(50, 150)
enemy_X_movement[index] = 0.2
def reset_bullet(n):
global bullet_fired, bullet_Y
bullet_fired[n] = False
bullet_Y[n] = player_Y
def isCollion(eX, eY, bX, bY):
distance = math.sqrt(math.pow(eX - bX, 2) + (math.pow(eY - bY, 2)))
if distance < 27:
return True
else:
return False
def show_score():
text = score_font.render("Score: " + str(score), True, (255, 255, 255))
screen.blit(text, (textX, testY))
def show_intro():
show_big_text(intro_text)
show_play_button()
def show_big_text(s):
text = intro_font.render(s, True, (89, 203, 255))
text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))
screen.blit(text, text_rect)
text2 = intro_font2.render(s, True, (250, 50, 183))
text_rect2 = text.get_rect(center=((SCREEN_WIDTH / 2) + 3, (SCREEN_HEIGHT / 2) + 3))
screen.blit(text2, text_rect2)
def show_play_button():
screen.blit(play_button, (play_button_X, play_button_Y))
def show_replay_button():
screen.blit(replay_button, (play_button_X, play_button_Y))
def play_button_clicked():
click = pygame.mouse.get_pressed()
if click[0] == 1:
pos = pygame.mouse.get_pos()
if play_button_X < pos[0] < play_button_X + play_button.get_width():
if play_button_Y < pos[1] < play_button_Y + play_button.get_height():
return True
return False
def game_over_screen():
show_big_text(gameover_text)
show_score()
show_replay_button()
def reset():
global num_enemies, enemy_X, enemy_Y, player_X, player_Y, score, bullet_fired, gamespeed, num_bullet, bullet_X, bullet_Y
num_enemies = 2
enemy_X = []
enemy_Y = []
for i in range(num_enemies):
enemy_X.append(random.randint(0, 736))
enemy_Y.append(random.randint(50, 150))
enemy_X_movement.append(2)
player_X = 370
player_Y = 480
score = 0
bullet_fired = []
bullet_fired.append(False)
gamespeed = 0
num_bullet = 1
bullet_X = []
bullet_X.append(0)
bullet_Y = []
bullet_Y.append(player_Y)
running = True
while running:
screen.fill((0, 0, 0))
screen.blit(background, (0, 0))
dt = clock.tick(60)
while intro:
show_intro()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if play_button_clicked():
intro = False
pygame.display.update()
while gameover:
game_over_screen()
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
if play_button_clicked():
reset()
gameover = False
pygame.display.update()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
player_movement = -0.2 - gamespeed
if event.key == pygame.K_RIGHT:
player_movement = 0.2 + gamespeed
if event.key == pygame.K_SPACE:
for i in range(num_bullet):
if not bullet_fired[i]:
bullet_X[i] = player_X
fire_bullet(bullet_X[i], bullet_Y[i], i)
break
if event.type == pygame.KEYUP:
if event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:
player_movement = 0
# playermovement
player_X += player_movement * dt
if player_X <= 1:
player_X = 1
elif player_X >= 735:
player_X = 735
# bulletmovement
for i in range(num_bullet):
if bullet_Y[i] <= 1:
reset_bullet(i)
if bullet_fired[i]:
bullet_Y[i] -= bullet_movement * dt
fire_bullet(bullet_X[i], bullet_Y[i], i)
# enemy_movement
for i in range(num_enemies):
if enemy_Y[i] >= 440:
gameover = True
for j in range(num_bullet):
if bullet_fired[j]:
collision = isCollion(enemy_X[i], enemy_Y[i], bullet_X[j], bullet_Y[j])
if collision:
reset_enemy(i)
reset_bullet(j)
score += 1
if score != 0 and previous_score != score:
if score % 3 == 0:
add_enemy()
print("added enemy")
if score % 10 == 0:
gamespeed += gamespeed_increment
print("increased gamespeed")
if score % 20 == 0:
add_bullet()
print("added bullet")
previous_score = score
if enemy_X_movement[i] < 0:
enemy_X[i] += (enemy_X_movement[i] - gamespeed) * dt
else:
enemy_X[i] += (enemy_X_movement[i] + gamespeed) * dt
if enemy_X[i] <= 1:
enemy_X[i] = 2
enemy_X_movement[i] = -enemy_X_movement[i]
enemy_Y[i] += (enemy_Y_movement + gamespeed)
elif enemy_X[i] >= 735:
enemy_X[i] = 734
enemy_X_movement[i] = -enemy_X_movement[i]
enemy_Y[i] += (enemy_Y_movement + gamespeed)
spawn_enemy(enemy_X[i], enemy_Y[i])
player(player_X, player_Y)
show_score()
pygame.display.update()
|
normal
|
{
"blob_id": "f5dffa3c22bb35ed07cb5ca28f2ba02ea3c07dda",
"index": 1083,
"step-1": "<mask token>\n\n\ndef player(x, y):\n screen.blit(player_image, (x, y))\n\n\ndef fire_bullet(x, y, n):\n global bullet_fired\n bullet_fired[n] = True\n screen.blit(bullet_image, (x + 16, y + 10))\n\n\ndef add_bullet():\n global num_bullet\n num_bullet += 1\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n\ndef spawn_enemy(x, y):\n screen.blit(enemy_image, (x, y))\n\n\ndef add_enemy():\n global num_enemies\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n num_enemies += 1\n\n\ndef reset_enemy(index):\n enemy_X[index] = random.randint(0, 736)\n enemy_Y[index] = random.randint(50, 150)\n enemy_X_movement[index] = 0.2\n\n\ndef reset_bullet(n):\n global bullet_fired, bullet_Y\n bullet_fired[n] = False\n bullet_Y[n] = player_Y\n\n\ndef isCollion(eX, eY, bX, bY):\n distance = math.sqrt(math.pow(eX - bX, 2) + math.pow(eY - bY, 2))\n if distance < 27:\n return True\n else:\n return False\n\n\ndef show_score():\n text = score_font.render('Score: ' + str(score), True, (255, 255, 255))\n screen.blit(text, (textX, testY))\n\n\ndef show_intro():\n show_big_text(intro_text)\n show_play_button()\n\n\ndef show_big_text(s):\n text = intro_font.render(s, True, (89, 203, 255))\n text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))\n screen.blit(text, text_rect)\n text2 = intro_font2.render(s, True, (250, 50, 183))\n text_rect2 = text.get_rect(center=(SCREEN_WIDTH / 2 + 3, SCREEN_HEIGHT /\n 2 + 3))\n screen.blit(text2, text_rect2)\n\n\ndef show_play_button():\n screen.blit(play_button, (play_button_X, play_button_Y))\n\n\ndef show_replay_button():\n screen.blit(replay_button, (play_button_X, play_button_Y))\n\n\ndef play_button_clicked():\n click = pygame.mouse.get_pressed()\n if click[0] == 1:\n pos = pygame.mouse.get_pos()\n if play_button_X < pos[0] < play_button_X + play_button.get_width():\n if play_button_Y < pos[1] < play_button_Y + play_button.get_height(\n ):\n return True\n return False\n\n\ndef game_over_screen():\n show_big_text(gameover_text)\n show_score()\n show_replay_button()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef player(x, y):\n screen.blit(player_image, (x, y))\n\n\ndef fire_bullet(x, y, n):\n global bullet_fired\n bullet_fired[n] = True\n screen.blit(bullet_image, (x + 16, y + 10))\n\n\ndef add_bullet():\n global num_bullet\n num_bullet += 1\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n\ndef spawn_enemy(x, y):\n screen.blit(enemy_image, (x, y))\n\n\ndef add_enemy():\n global num_enemies\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n num_enemies += 1\n\n\ndef reset_enemy(index):\n enemy_X[index] = random.randint(0, 736)\n enemy_Y[index] = random.randint(50, 150)\n enemy_X_movement[index] = 0.2\n\n\ndef reset_bullet(n):\n global bullet_fired, bullet_Y\n bullet_fired[n] = False\n bullet_Y[n] = player_Y\n\n\ndef isCollion(eX, eY, bX, bY):\n distance = math.sqrt(math.pow(eX - bX, 2) + math.pow(eY - bY, 2))\n if distance < 27:\n return True\n else:\n return False\n\n\ndef show_score():\n text = score_font.render('Score: ' + str(score), True, (255, 255, 255))\n screen.blit(text, (textX, testY))\n\n\ndef show_intro():\n show_big_text(intro_text)\n show_play_button()\n\n\ndef show_big_text(s):\n text = intro_font.render(s, True, (89, 203, 255))\n text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))\n screen.blit(text, text_rect)\n text2 = intro_font2.render(s, True, (250, 50, 183))\n text_rect2 = text.get_rect(center=(SCREEN_WIDTH / 2 + 3, SCREEN_HEIGHT /\n 2 + 3))\n screen.blit(text2, text_rect2)\n\n\ndef show_play_button():\n screen.blit(play_button, (play_button_X, play_button_Y))\n\n\ndef show_replay_button():\n screen.blit(replay_button, (play_button_X, play_button_Y))\n\n\ndef play_button_clicked():\n click = pygame.mouse.get_pressed()\n if click[0] == 1:\n pos = pygame.mouse.get_pos()\n if play_button_X < pos[0] < play_button_X + play_button.get_width():\n if play_button_Y < pos[1] < play_button_Y + play_button.get_height(\n ):\n return True\n return False\n\n\ndef game_over_screen():\n show_big_text(gameover_text)\n show_score()\n show_replay_button()\n\n\ndef reset():\n global num_enemies, enemy_X, enemy_Y, player_X, player_Y, score, bullet_fired, gamespeed, num_bullet, bullet_X, bullet_Y\n num_enemies = 2\n enemy_X = []\n enemy_Y = []\n for i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(2)\n player_X = 370\n player_Y = 480\n score = 0\n bullet_fired = []\n bullet_fired.append(False)\n gamespeed = 0\n num_bullet = 1\n bullet_X = []\n bullet_X.append(0)\n bullet_Y = []\n bullet_Y.append(player_Y)\n\n\n<mask token>\n",
"step-3": "<mask token>\npygame.init()\nSCREEN_WIDTH = 800\nSCREEN_HEIGHT = 600\nscreen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\nclock = pygame.time.Clock()\npygame.display.set_caption('space invaders')\nbackground = pygame.image.load('background.png')\nscore = 0\nprevious_score = 0\nscore_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 32)\ntextX = 10\ntestY = 10\nintro = True\nintro_text = 'SpaceInvaders'\nintro_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\nintro_font2 = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\nplay_button = pygame.image.load('play-button.png')\nplay_button_X = SCREEN_WIDTH / 2 - play_button.get_width()\nplay_button_Y = SCREEN_HEIGHT / (4 / 3) - play_button.get_height()\ngameover = False\ngameover_text = 'Game Over'\nreplay_button = pygame.image.load('replay.png')\nplayer_image = pygame.image.load('spaceship.png')\nplayer_X = 370\nplayer_Y = 480\nplayer_movement = 0\nbullet_image = pygame.image.load('hot.png')\nbullet_X = []\nbullet_Y = []\nbullet_movement = 0.7\nbullet_fired = []\nnum_bullet = 1\nfor i in range(num_bullet):\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\nenemy_image = pygame.image.load('ufo.png')\nenemy_X = []\nenemy_Y = []\nenemy_X_movement = []\nenemy_Y_movement = 40\nnum_enemies = 2\ngamespeed = 0\ngamespeed_increment = 0.05\nfor i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n\n\ndef player(x, y):\n screen.blit(player_image, (x, y))\n\n\ndef fire_bullet(x, y, n):\n global bullet_fired\n bullet_fired[n] = True\n screen.blit(bullet_image, (x + 16, y + 10))\n\n\ndef add_bullet():\n global num_bullet\n num_bullet += 1\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n\ndef spawn_enemy(x, y):\n screen.blit(enemy_image, (x, y))\n\n\ndef add_enemy():\n global num_enemies\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n num_enemies += 1\n\n\ndef reset_enemy(index):\n enemy_X[index] = random.randint(0, 736)\n enemy_Y[index] = random.randint(50, 150)\n enemy_X_movement[index] = 0.2\n\n\ndef reset_bullet(n):\n global bullet_fired, bullet_Y\n bullet_fired[n] = False\n bullet_Y[n] = player_Y\n\n\ndef isCollion(eX, eY, bX, bY):\n distance = math.sqrt(math.pow(eX - bX, 2) + math.pow(eY - bY, 2))\n if distance < 27:\n return True\n else:\n return False\n\n\ndef show_score():\n text = score_font.render('Score: ' + str(score), True, (255, 255, 255))\n screen.blit(text, (textX, testY))\n\n\ndef show_intro():\n show_big_text(intro_text)\n show_play_button()\n\n\ndef show_big_text(s):\n text = intro_font.render(s, True, (89, 203, 255))\n text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))\n screen.blit(text, text_rect)\n text2 = intro_font2.render(s, True, (250, 50, 183))\n text_rect2 = text.get_rect(center=(SCREEN_WIDTH / 2 + 3, SCREEN_HEIGHT /\n 2 + 3))\n screen.blit(text2, text_rect2)\n\n\ndef show_play_button():\n screen.blit(play_button, (play_button_X, play_button_Y))\n\n\ndef show_replay_button():\n screen.blit(replay_button, (play_button_X, play_button_Y))\n\n\ndef play_button_clicked():\n click = pygame.mouse.get_pressed()\n if click[0] == 1:\n pos = pygame.mouse.get_pos()\n if play_button_X < pos[0] < play_button_X + play_button.get_width():\n if play_button_Y < pos[1] < play_button_Y + play_button.get_height(\n ):\n return True\n return False\n\n\ndef game_over_screen():\n 
show_big_text(gameover_text)\n show_score()\n show_replay_button()\n\n\ndef reset():\n global num_enemies, enemy_X, enemy_Y, player_X, player_Y, score, bullet_fired, gamespeed, num_bullet, bullet_X, bullet_Y\n num_enemies = 2\n enemy_X = []\n enemy_Y = []\n for i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(2)\n player_X = 370\n player_Y = 480\n score = 0\n bullet_fired = []\n bullet_fired.append(False)\n gamespeed = 0\n num_bullet = 1\n bullet_X = []\n bullet_X.append(0)\n bullet_Y = []\n bullet_Y.append(player_Y)\n\n\nrunning = True\nwhile running:\n screen.fill((0, 0, 0))\n screen.blit(background, (0, 0))\n dt = clock.tick(60)\n while intro:\n show_intro()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n if play_button_clicked():\n intro = False\n pygame.display.update()\n while gameover:\n game_over_screen()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n if play_button_clicked():\n reset()\n gameover = False\n pygame.display.update()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n running = False\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_LEFT:\n player_movement = -0.2 - gamespeed\n if event.key == pygame.K_RIGHT:\n player_movement = 0.2 + gamespeed\n if event.key == pygame.K_SPACE:\n for i in range(num_bullet):\n if not bullet_fired[i]:\n bullet_X[i] = player_X\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n break\n if event.type == pygame.KEYUP:\n if event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:\n player_movement = 0\n player_X += player_movement * dt\n if player_X <= 1:\n player_X = 1\n elif player_X >= 735:\n player_X = 735\n for i in range(num_bullet):\n if bullet_Y[i] <= 1:\n reset_bullet(i)\n if bullet_fired[i]:\n bullet_Y[i] -= bullet_movement * dt\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n for i in range(num_enemies):\n if enemy_Y[i] >= 440:\n gameover = True\n for j in range(num_bullet):\n if bullet_fired[j]:\n collision = isCollion(enemy_X[i], enemy_Y[i], bullet_X[j],\n bullet_Y[j])\n if collision:\n reset_enemy(i)\n reset_bullet(j)\n score += 1\n if score != 0 and previous_score != score:\n if score % 3 == 0:\n add_enemy()\n print('added enemy')\n if score % 10 == 0:\n gamespeed += gamespeed_increment\n print('increased gamespeed')\n if score % 20 == 0:\n add_bullet()\n print('added bullet')\n previous_score = score\n if enemy_X_movement[i] < 0:\n enemy_X[i] += (enemy_X_movement[i] - gamespeed) * dt\n else:\n enemy_X[i] += (enemy_X_movement[i] + gamespeed) * dt\n if enemy_X[i] <= 1:\n enemy_X[i] = 2\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += enemy_Y_movement + gamespeed\n elif enemy_X[i] >= 735:\n enemy_X[i] = 734\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += enemy_Y_movement + gamespeed\n spawn_enemy(enemy_X[i], enemy_Y[i])\n player(player_X, player_Y)\n show_score()\n pygame.display.update()\n",
"step-4": "import math\nimport random\nimport pygame\npygame.init()\nSCREEN_WIDTH = 800\nSCREEN_HEIGHT = 600\nscreen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\nclock = pygame.time.Clock()\npygame.display.set_caption('space invaders')\nbackground = pygame.image.load('background.png')\nscore = 0\nprevious_score = 0\nscore_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 32)\ntextX = 10\ntestY = 10\nintro = True\nintro_text = 'SpaceInvaders'\nintro_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\nintro_font2 = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\nplay_button = pygame.image.load('play-button.png')\nplay_button_X = SCREEN_WIDTH / 2 - play_button.get_width()\nplay_button_Y = SCREEN_HEIGHT / (4 / 3) - play_button.get_height()\ngameover = False\ngameover_text = 'Game Over'\nreplay_button = pygame.image.load('replay.png')\nplayer_image = pygame.image.load('spaceship.png')\nplayer_X = 370\nplayer_Y = 480\nplayer_movement = 0\nbullet_image = pygame.image.load('hot.png')\nbullet_X = []\nbullet_Y = []\nbullet_movement = 0.7\nbullet_fired = []\nnum_bullet = 1\nfor i in range(num_bullet):\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\nenemy_image = pygame.image.load('ufo.png')\nenemy_X = []\nenemy_Y = []\nenemy_X_movement = []\nenemy_Y_movement = 40\nnum_enemies = 2\ngamespeed = 0\ngamespeed_increment = 0.05\nfor i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n\n\ndef player(x, y):\n screen.blit(player_image, (x, y))\n\n\ndef fire_bullet(x, y, n):\n global bullet_fired\n bullet_fired[n] = True\n screen.blit(bullet_image, (x + 16, y + 10))\n\n\ndef add_bullet():\n global num_bullet\n num_bullet += 1\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n\ndef spawn_enemy(x, y):\n screen.blit(enemy_image, (x, y))\n\n\ndef add_enemy():\n global num_enemies\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n num_enemies += 1\n\n\ndef reset_enemy(index):\n enemy_X[index] = random.randint(0, 736)\n enemy_Y[index] = random.randint(50, 150)\n enemy_X_movement[index] = 0.2\n\n\ndef reset_bullet(n):\n global bullet_fired, bullet_Y\n bullet_fired[n] = False\n bullet_Y[n] = player_Y\n\n\ndef isCollion(eX, eY, bX, bY):\n distance = math.sqrt(math.pow(eX - bX, 2) + math.pow(eY - bY, 2))\n if distance < 27:\n return True\n else:\n return False\n\n\ndef show_score():\n text = score_font.render('Score: ' + str(score), True, (255, 255, 255))\n screen.blit(text, (textX, testY))\n\n\ndef show_intro():\n show_big_text(intro_text)\n show_play_button()\n\n\ndef show_big_text(s):\n text = intro_font.render(s, True, (89, 203, 255))\n text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))\n screen.blit(text, text_rect)\n text2 = intro_font2.render(s, True, (250, 50, 183))\n text_rect2 = text.get_rect(center=(SCREEN_WIDTH / 2 + 3, SCREEN_HEIGHT /\n 2 + 3))\n screen.blit(text2, text_rect2)\n\n\ndef show_play_button():\n screen.blit(play_button, (play_button_X, play_button_Y))\n\n\ndef show_replay_button():\n screen.blit(replay_button, (play_button_X, play_button_Y))\n\n\ndef play_button_clicked():\n click = pygame.mouse.get_pressed()\n if click[0] == 1:\n pos = pygame.mouse.get_pos()\n if play_button_X < pos[0] < play_button_X + play_button.get_width():\n if play_button_Y < pos[1] < play_button_Y + play_button.get_height(\n ):\n return True\n return False\n\n\ndef 
game_over_screen():\n show_big_text(gameover_text)\n show_score()\n show_replay_button()\n\n\ndef reset():\n global num_enemies, enemy_X, enemy_Y, player_X, player_Y, score, bullet_fired, gamespeed, num_bullet, bullet_X, bullet_Y\n num_enemies = 2\n enemy_X = []\n enemy_Y = []\n for i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(2)\n player_X = 370\n player_Y = 480\n score = 0\n bullet_fired = []\n bullet_fired.append(False)\n gamespeed = 0\n num_bullet = 1\n bullet_X = []\n bullet_X.append(0)\n bullet_Y = []\n bullet_Y.append(player_Y)\n\n\nrunning = True\nwhile running:\n screen.fill((0, 0, 0))\n screen.blit(background, (0, 0))\n dt = clock.tick(60)\n while intro:\n show_intro()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n if play_button_clicked():\n intro = False\n pygame.display.update()\n while gameover:\n game_over_screen()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n if play_button_clicked():\n reset()\n gameover = False\n pygame.display.update()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n running = False\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_LEFT:\n player_movement = -0.2 - gamespeed\n if event.key == pygame.K_RIGHT:\n player_movement = 0.2 + gamespeed\n if event.key == pygame.K_SPACE:\n for i in range(num_bullet):\n if not bullet_fired[i]:\n bullet_X[i] = player_X\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n break\n if event.type == pygame.KEYUP:\n if event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:\n player_movement = 0\n player_X += player_movement * dt\n if player_X <= 1:\n player_X = 1\n elif player_X >= 735:\n player_X = 735\n for i in range(num_bullet):\n if bullet_Y[i] <= 1:\n reset_bullet(i)\n if bullet_fired[i]:\n bullet_Y[i] -= bullet_movement * dt\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n for i in range(num_enemies):\n if enemy_Y[i] >= 440:\n gameover = True\n for j in range(num_bullet):\n if bullet_fired[j]:\n collision = isCollion(enemy_X[i], enemy_Y[i], bullet_X[j],\n bullet_Y[j])\n if collision:\n reset_enemy(i)\n reset_bullet(j)\n score += 1\n if score != 0 and previous_score != score:\n if score % 3 == 0:\n add_enemy()\n print('added enemy')\n if score % 10 == 0:\n gamespeed += gamespeed_increment\n print('increased gamespeed')\n if score % 20 == 0:\n add_bullet()\n print('added bullet')\n previous_score = score\n if enemy_X_movement[i] < 0:\n enemy_X[i] += (enemy_X_movement[i] - gamespeed) * dt\n else:\n enemy_X[i] += (enemy_X_movement[i] + gamespeed) * dt\n if enemy_X[i] <= 1:\n enemy_X[i] = 2\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += enemy_Y_movement + gamespeed\n elif enemy_X[i] >= 735:\n enemy_X[i] = 734\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += enemy_Y_movement + gamespeed\n spawn_enemy(enemy_X[i], enemy_Y[i])\n player(player_X, player_Y)\n show_score()\n pygame.display.update()\n",
"step-5": "import math\nimport random\n\nimport pygame\n\npygame.init()\n\nSCREEN_WIDTH = 800\nSCREEN_HEIGHT = 600\nscreen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))\n\nclock = pygame.time.Clock()\n\npygame.display.set_caption('space invaders')\n\nbackground = pygame.image.load('background.png')\n\nscore = 0\nprevious_score = 0\nscore_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 32)\ntextX = 10\ntestY = 10\n\n# intro\nintro = True\nintro_text = \"SpaceInvaders\"\nintro_font = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\nintro_font2 = pygame.font.Font('arcade_weknow/ARCADE.otf', 64)\n\n# PlayButton\nplay_button = pygame.image.load('play-button.png')\nplay_button_X = (SCREEN_WIDTH / 2) - play_button.get_width()\nplay_button_Y = (SCREEN_HEIGHT / (4 / 3)) - play_button.get_height()\n\n# GameOver\ngameover = False\ngameover_text = \"Game Over\"\nreplay_button = pygame.image.load('replay.png')\n\n# player\nplayer_image = pygame.image.load('spaceship.png')\nplayer_X = 370\nplayer_Y = 480\nplayer_movement = 0\n\n# bullet\nbullet_image = pygame.image.load('hot.png')\nbullet_X = []\nbullet_Y = []\nbullet_movement = 0.7\nbullet_fired = []\nnum_bullet = 1\nfor i in range(num_bullet):\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n# enemy\nenemy_image = pygame.image.load('ufo.png')\nenemy_X = []\nenemy_Y = []\nenemy_X_movement = []\nenemy_Y_movement = 40\nnum_enemies = 2\n\n# gamespeedincrement\ngamespeed = 0\ngamespeed_increment = 0.05\n\nfor i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n\n\ndef player(x, y):\n screen.blit(player_image, (x, y))\n\n\ndef fire_bullet(x, y, n):\n global bullet_fired\n bullet_fired[n] = True\n screen.blit(bullet_image, (x + 16, y + 10))\n\n\ndef add_bullet():\n global num_bullet\n num_bullet += 1\n bullet_X.append(0)\n bullet_Y.append(player_Y)\n bullet_fired.append(False)\n\n\ndef spawn_enemy(x, y):\n screen.blit(enemy_image, (x, y))\n\n\ndef add_enemy():\n global num_enemies\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(0.2)\n num_enemies += 1\n\n\ndef reset_enemy(index):\n enemy_X[index] = random.randint(0, 736)\n enemy_Y[index] = random.randint(50, 150)\n enemy_X_movement[index] = 0.2\n\n\ndef reset_bullet(n):\n global bullet_fired, bullet_Y\n bullet_fired[n] = False\n bullet_Y[n] = player_Y\n\n\ndef isCollion(eX, eY, bX, bY):\n distance = math.sqrt(math.pow(eX - bX, 2) + (math.pow(eY - bY, 2)))\n if distance < 27:\n return True\n else:\n return False\n\n\ndef show_score():\n text = score_font.render(\"Score: \" + str(score), True, (255, 255, 255))\n screen.blit(text, (textX, testY))\n\n\ndef show_intro():\n show_big_text(intro_text)\n show_play_button()\n\n\ndef show_big_text(s):\n text = intro_font.render(s, True, (89, 203, 255))\n text_rect = text.get_rect(center=(SCREEN_WIDTH / 2, SCREEN_HEIGHT / 2))\n screen.blit(text, text_rect)\n text2 = intro_font2.render(s, True, (250, 50, 183))\n text_rect2 = text.get_rect(center=((SCREEN_WIDTH / 2) + 3, (SCREEN_HEIGHT / 2) + 3))\n screen.blit(text2, text_rect2)\n\n\ndef show_play_button():\n screen.blit(play_button, (play_button_X, play_button_Y))\n\n\ndef show_replay_button():\n screen.blit(replay_button, (play_button_X, play_button_Y))\n\n\ndef play_button_clicked():\n click = pygame.mouse.get_pressed()\n if click[0] == 1:\n pos = pygame.mouse.get_pos()\n if play_button_X < pos[0] < play_button_X + 
play_button.get_width():\n if play_button_Y < pos[1] < play_button_Y + play_button.get_height():\n return True\n return False\n\n\ndef game_over_screen():\n show_big_text(gameover_text)\n show_score()\n show_replay_button()\n\n\ndef reset():\n global num_enemies, enemy_X, enemy_Y, player_X, player_Y, score, bullet_fired, gamespeed, num_bullet, bullet_X, bullet_Y\n num_enemies = 2\n enemy_X = []\n enemy_Y = []\n for i in range(num_enemies):\n enemy_X.append(random.randint(0, 736))\n enemy_Y.append(random.randint(50, 150))\n enemy_X_movement.append(2)\n player_X = 370\n player_Y = 480\n score = 0\n bullet_fired = []\n bullet_fired.append(False)\n gamespeed = 0\n num_bullet = 1\n bullet_X = []\n bullet_X.append(0)\n bullet_Y = []\n bullet_Y.append(player_Y)\n\n\nrunning = True\nwhile running:\n\n screen.fill((0, 0, 0))\n screen.blit(background, (0, 0))\n dt = clock.tick(60)\n\n while intro:\n show_intro()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n\n if play_button_clicked():\n intro = False\n\n pygame.display.update()\n\n while gameover:\n game_over_screen()\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n quit()\n\n if play_button_clicked():\n reset()\n gameover = False\n\n pygame.display.update()\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n running = False\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_LEFT:\n player_movement = -0.2 - gamespeed\n if event.key == pygame.K_RIGHT:\n player_movement = 0.2 + gamespeed\n if event.key == pygame.K_SPACE:\n for i in range(num_bullet):\n if not bullet_fired[i]:\n bullet_X[i] = player_X\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n break\n if event.type == pygame.KEYUP:\n if event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:\n player_movement = 0\n\n # playermovement\n player_X += player_movement * dt\n if player_X <= 1:\n player_X = 1\n elif player_X >= 735:\n player_X = 735\n\n # bulletmovement\n for i in range(num_bullet):\n if bullet_Y[i] <= 1:\n reset_bullet(i)\n if bullet_fired[i]:\n bullet_Y[i] -= bullet_movement * dt\n fire_bullet(bullet_X[i], bullet_Y[i], i)\n\n # enemy_movement\n for i in range(num_enemies):\n if enemy_Y[i] >= 440:\n gameover = True\n\n for j in range(num_bullet):\n if bullet_fired[j]:\n collision = isCollion(enemy_X[i], enemy_Y[i], bullet_X[j], bullet_Y[j])\n if collision:\n reset_enemy(i)\n reset_bullet(j)\n score += 1\n\n if score != 0 and previous_score != score:\n if score % 3 == 0:\n add_enemy()\n print(\"added enemy\")\n if score % 10 == 0:\n gamespeed += gamespeed_increment\n print(\"increased gamespeed\")\n if score % 20 == 0:\n add_bullet()\n print(\"added bullet\")\n previous_score = score\n\n if enemy_X_movement[i] < 0:\n enemy_X[i] += (enemy_X_movement[i] - gamespeed) * dt\n else:\n enemy_X[i] += (enemy_X_movement[i] + gamespeed) * dt\n if enemy_X[i] <= 1:\n enemy_X[i] = 2\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += (enemy_Y_movement + gamespeed)\n elif enemy_X[i] >= 735:\n enemy_X[i] = 734\n enemy_X_movement[i] = -enemy_X_movement[i]\n enemy_Y[i] += (enemy_Y_movement + gamespeed)\n\n spawn_enemy(enemy_X[i], enemy_Y[i])\n\n player(player_X, player_Y)\n show_score()\n pygame.display.update()\n",
"step-ids": [
15,
16,
18,
19,
20
]
}
|
[
15,
16,
18,
19,
20
] |
import sqlite3
import argparse
import json
import index_db
from collections import defaultdict
def query_doc(cursor, lang, title):
cursor.execute(index_db.select_lang_title, (lang, title))
result = cursor.fetchone()
if not result:
return None
return {
'lang': result[0],
'doc_id': result[1],
'doc_path': result[2],
# 'url': result[3], # I don't think url is needed here...
'title': result[4],
'begin': result[5],
'end': result[6]
}
def locate_single_topic_texts(lang_title_dict, cursor):
same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())
return sorted(
(i for i in same_topic if i),
key=lambda x: x['lang']
)
def locate_interlanguage_texts(file_path, db_path):
with open(file_path, 'rt') as f:
interlangauge = json.load(f)
with sqlite3.connect(db_path) as conn:
c = conn.cursor()
return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Locate same topic texts over multiple languages.')
parser.add_argument('--db', dest='db_path', default=index_db.default_path,
help='a sqlite database file generated by index.py')
parser.add_argument('--input', dest='input_path',
default='interlanguage_topics.json',
help='a json file containing sets of topics over '
'multiple languages')
parser.add_argument('--output', dest='output_path',
default='interlanguage_location.json',
help='a json file locating same topic texts over '
'multiple languages')
args = parser.parse_args()
location_infos = locate_interlanguage_texts(args.input_path, args.db_path)
with open(args.output_path, 'wt') as f:
json.dump(location_infos, f)
|
normal
|
{
"blob_id": "95e7e025660e71cbdf6a6a0812964fc26d4beec0",
"index": 9657,
"step-1": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Locate same topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.\n default_path, help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path', default=\n 'interlanguage_topics.json', help=\n 'a json file containing sets of topics over multiple languages')\n parser.add_argument('--output', dest='output_path', default=\n 'interlanguage_location.json', help=\n 'a json file locating same topic texts over multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n",
"step-4": "import sqlite3\nimport argparse\nimport json\nimport index_db\nfrom collections import defaultdict\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {'lang': result[0], 'doc_id': result[1], 'doc_path': result[2],\n 'title': result[4], 'begin': result[5], 'end': result[6]}\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted((i for i in same_topic if i), key=lambda x: x['lang'])\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\n 'Locate same topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.\n default_path, help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path', default=\n 'interlanguage_topics.json', help=\n 'a json file containing sets of topics over multiple languages')\n parser.add_argument('--output', dest='output_path', default=\n 'interlanguage_location.json', help=\n 'a json file locating same topic texts over multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n",
"step-5": "import sqlite3\nimport argparse\nimport json\nimport index_db\nfrom collections import defaultdict\n\n\ndef query_doc(cursor, lang, title):\n cursor.execute(index_db.select_lang_title, (lang, title))\n result = cursor.fetchone()\n if not result:\n return None\n return {\n 'lang': result[0],\n 'doc_id': result[1],\n 'doc_path': result[2],\n # 'url': result[3], # I don't think url is needed here...\n 'title': result[4],\n 'begin': result[5],\n 'end': result[6]\n }\n\n\ndef locate_single_topic_texts(lang_title_dict, cursor):\n same_topic = (query_doc(cursor, l, t) for l, t in lang_title_dict.items())\n return sorted(\n (i for i in same_topic if i),\n key=lambda x: x['lang']\n )\n\n\ndef locate_interlanguage_texts(file_path, db_path):\n with open(file_path, 'rt') as f:\n interlangauge = json.load(f)\n\n with sqlite3.connect(db_path) as conn:\n c = conn.cursor()\n return [locate_single_topic_texts(pairs, c) for pairs in interlangauge]\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(\n description='Locate same topic texts over multiple languages.')\n parser.add_argument('--db', dest='db_path', default=index_db.default_path,\n help='a sqlite database file generated by index.py')\n parser.add_argument('--input', dest='input_path',\n default='interlanguage_topics.json',\n help='a json file containing sets of topics over '\n 'multiple languages')\n parser.add_argument('--output', dest='output_path',\n default='interlanguage_location.json',\n help='a json file locating same topic texts over '\n 'multiple languages')\n args = parser.parse_args()\n location_infos = locate_interlanguage_texts(args.input_path, args.db_path)\n with open(args.output_path, 'wt') as f:\n json.dump(location_infos, f)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from django.apps import AppConfig
class ModuloConfig(AppConfig):
name = 'modulo'
verbose_name = 'TUM:JungeAkademie - Modulo'
def ready(self):
#start-up / initialization code here!!!
from .recommender import Recommender
Recommender.initialize()
|
normal
|
{
"blob_id": "31275ca9e20da9d2709ea396e55c113b3ff4f571",
"index": 7738,
"step-1": "<mask token>\n\n\nclass ModuloConfig(AppConfig):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ModuloConfig(AppConfig):\n <mask token>\n <mask token>\n\n def ready(self):\n from .recommender import Recommender\n Recommender.initialize()\n",
"step-3": "<mask token>\n\n\nclass ModuloConfig(AppConfig):\n name = 'modulo'\n verbose_name = 'TUM:JungeAkademie - Modulo'\n\n def ready(self):\n from .recommender import Recommender\n Recommender.initialize()\n",
"step-4": "from django.apps import AppConfig\n\n\nclass ModuloConfig(AppConfig):\n name = 'modulo'\n verbose_name = 'TUM:JungeAkademie - Modulo'\n\n def ready(self):\n from .recommender import Recommender\n Recommender.initialize()\n",
"step-5": "from django.apps import AppConfig\n\n\nclass ModuloConfig(AppConfig):\n name = 'modulo'\n verbose_name = 'TUM:JungeAkademie - Modulo'\n \n def ready(self):\n #start-up / initialization code here!!!\n from .recommender import Recommender\n Recommender.initialize()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from .cli import cli
if __name__ == "__main__":
exit(cli.main(prog_name="htmap"))
|
normal
|
{
"blob_id": "069338b188f3cf16357b2502cbb3130b69918bd9",
"index": 286,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-3": "from .cli import cli\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-4": "from .cli import cli\n\nif __name__ == \"__main__\":\n exit(cli.main(prog_name=\"htmap\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Download the full CHIRPS 2.0 data for a specific type (dekads, pentads, daily ...)
with the possibility to automatically recut the data over Argentina.
"""
import os
import requests
import urllib.request
import time
from bs4 import BeautifulSoup
import subprocess
##############
# PARAMETERS to define
# Set a pre-existing directory where the CHIRPS files must be saved
download_dir = ""
# Url for global dekad, change if you want another product
url = 'https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_dekad/netcdf/'
# Recut the data over Argentina
argentina = False
startindex = 5
##############
if download_dir != "":
os.chdir(download_dir)
response = requests.get(url)
soup = BeautifulSoup(response.text,"html.parser")
soup.findAll('a')
# First link to download in the page
# Here the index = 5 is valid for the dekad link but it may change if you download another product (ex : daily, dekad, monthly)
# To be sure you can check the link and check that it is the first year
one_a_tag = soup.findAll('a')[startindex:]
links = [one_a_tag[i]['href'] for i in range(len(one_a_tag))]
for link in links:
print(link)
download_url = url + link
urllib.request.urlretrieve(download_url,"./"+link)
# Section to recut CHIRPS over Argentina
if argentina:
subprocess.check_call(["cdo", "sellonlatbox,-80,-44,-60,-20", link, link.replace(".nc", "ARG.nc")])
subprocess.check_call(["rm", link])
time.sleep(1)
else:
print("Please enter a valid download direction")
|
normal
|
{
"blob_id": "ff0495ee1f4aa1f243c82b709a974d3d7c37e8bd",
"index": 2425,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif download_dir != '':\n os.chdir(download_dir)\n response = requests.get(url)\n soup = BeautifulSoup(response.text, 'html.parser')\n soup.findAll('a')\n one_a_tag = soup.findAll('a')[startindex:]\n links = [one_a_tag[i]['href'] for i in range(len(one_a_tag))]\n for link in links:\n print(link)\n download_url = url + link\n urllib.request.urlretrieve(download_url, './' + link)\n if argentina:\n subprocess.check_call(['cdo', 'sellonlatbox,-80,-44,-60,-20',\n link, link.replace('.nc', 'ARG.nc')])\n subprocess.check_call(['rm', link])\n time.sleep(1)\nelse:\n print('Please enter a valid download direction')\n",
"step-3": "<mask token>\ndownload_dir = ''\nurl = 'https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_dekad/netcdf/'\nargentina = False\nstartindex = 5\nif download_dir != '':\n os.chdir(download_dir)\n response = requests.get(url)\n soup = BeautifulSoup(response.text, 'html.parser')\n soup.findAll('a')\n one_a_tag = soup.findAll('a')[startindex:]\n links = [one_a_tag[i]['href'] for i in range(len(one_a_tag))]\n for link in links:\n print(link)\n download_url = url + link\n urllib.request.urlretrieve(download_url, './' + link)\n if argentina:\n subprocess.check_call(['cdo', 'sellonlatbox,-80,-44,-60,-20',\n link, link.replace('.nc', 'ARG.nc')])\n subprocess.check_call(['rm', link])\n time.sleep(1)\nelse:\n print('Please enter a valid download direction')\n",
"step-4": "<mask token>\nimport os\nimport requests\nimport urllib.request\nimport time\nfrom bs4 import BeautifulSoup\nimport subprocess\ndownload_dir = ''\nurl = 'https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_dekad/netcdf/'\nargentina = False\nstartindex = 5\nif download_dir != '':\n os.chdir(download_dir)\n response = requests.get(url)\n soup = BeautifulSoup(response.text, 'html.parser')\n soup.findAll('a')\n one_a_tag = soup.findAll('a')[startindex:]\n links = [one_a_tag[i]['href'] for i in range(len(one_a_tag))]\n for link in links:\n print(link)\n download_url = url + link\n urllib.request.urlretrieve(download_url, './' + link)\n if argentina:\n subprocess.check_call(['cdo', 'sellonlatbox,-80,-44,-60,-20',\n link, link.replace('.nc', 'ARG.nc')])\n subprocess.check_call(['rm', link])\n time.sleep(1)\nelse:\n print('Please enter a valid download direction')\n",
"step-5": "\"\"\"\nDownload the full CHIRPS 2.0 data for a specific type (dekads, pentads, daily ...)\nwith the possibility to automatically recut the data over Argentina.\n\"\"\"\nimport os\nimport requests\nimport urllib.request\nimport time\nfrom bs4 import BeautifulSoup\nimport subprocess\n\n##############\n\n# PARAMETERS to define\n\n# Set a pre-existing directory where the CHIRPS files must be saved\ndownload_dir = \"\"\n# Url for global dekad, change if you want another product\nurl = 'https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_dekad/netcdf/'\n# Recut the data over Argentina\nargentina = False\nstartindex = 5\n\n##############\nif download_dir != \"\":\n os.chdir(download_dir)\n\n response = requests.get(url)\n soup = BeautifulSoup(response.text,\"html.parser\")\n soup.findAll('a')\n \n # First link to download in the page\n # Here the index = 5 is valid for the dekad link but it may change if you download another product (ex : daily, dekad, monthly)\n # To be sure you can check the link and check that it is the first year\n one_a_tag = soup.findAll('a')[startindex:] \n links = [one_a_tag[i]['href'] for i in range(len(one_a_tag))]\n\n for link in links:\n print(link)\n download_url = url + link\n urllib.request.urlretrieve(download_url,\"./\"+link)\n # Section to recut CHIRPS over Argentina\n if argentina:\n subprocess.check_call([\"cdo\", \"sellonlatbox,-80,-44,-60,-20\", link, link.replace(\".nc\", \"ARG.nc\")])\n subprocess.check_call([\"rm\", link])\n time.sleep(1)\n\nelse:\n print(\"Please enter a valid download direction\")\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from SPARQLWrapper import SPARQLWrapper, JSON
sparql = SPARQLWrapper(
'http://localhost:3030/ds/query'
)
#Pizzas
def get_response_pizzas():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:NamePizza .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#CarnesTopping
def get_response_carnes():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:CarnesTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#EmbutidosTopping
def get_response_embutidos():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:EmbutidosTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#EspeciasTopping
def get_response_especias():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:EspeciasTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#FrutasTopping
def get_response_frutas():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:FrutasTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#QuesosTopping
def get_response_quesos():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:QuesosTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#SalsasTopping
def get_response_salsas():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:SalsasTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
#VegetalesTopping
def get_response_vegetales():
sparql.setQuery('''
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>
SELECT DISTINCT ?name
WHERE {
?s rdfs:subClassOf saidi:VegetalesTopping .
?s rdfs:label ?name
FILTER (lang(?name) = 'es')
}
''')
sparql.setReturnFormat(JSON)
qres = sparql.query().convert()
return qres
if __name__ == '__main__':
get_response_pizzas()
get_response_carnes()
get_response_embutidos()
get_response_especias()
get_response_frutas()
get_response_quesos()
get_response_salsas()
get_response_vegetales()
|
normal
|
{
"blob_id": "9690366a88a87951f5c51902118888cce8159ffc",
"index": 7219,
"step-1": "<mask token>\n\n\ndef get_response_carnes():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:CarnesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_embutidos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EmbutidosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_especias():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EspeciasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_frutas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:FrutasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n<mask token>\n\n\ndef get_response_salsas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:SalsasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_vegetales():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:VegetalesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_response_carnes():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:CarnesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_embutidos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EmbutidosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_especias():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EspeciasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_frutas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:FrutasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_quesos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:QuesosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_salsas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:SalsasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_vegetales():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:VegetalesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef get_response_pizzas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:NamePizza .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_carnes():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:CarnesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_embutidos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EmbutidosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_especias():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EspeciasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_frutas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:FrutasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_quesos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:QuesosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_salsas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:SalsasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_vegetales():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:VegetalesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n<mask token>\n",
"step-4": "<mask token>\nsparql = SPARQLWrapper('http://localhost:3030/ds/query')\n\n\ndef get_response_pizzas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:NamePizza .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_carnes():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:CarnesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_embutidos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EmbutidosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_especias():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EspeciasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_frutas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:FrutasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_quesos():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:QuesosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_salsas():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:SalsasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\ndef get_response_vegetales():\n sparql.setQuery(\n \"\"\"\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:VegetalesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n \"\"\"\n )\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\nif __name__ == 
'__main__':\n get_response_pizzas()\n get_response_carnes()\n get_response_embutidos()\n get_response_especias()\n get_response_frutas()\n get_response_quesos()\n get_response_salsas()\n get_response_vegetales()\n",
"step-5": "from SPARQLWrapper import SPARQLWrapper, JSON\n\nsparql = SPARQLWrapper(\n 'http://localhost:3030/ds/query'\n \n )\n\n#Pizzas\ndef get_response_pizzas():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:NamePizza .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n#CarnesTopping\ndef get_response_carnes():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:CarnesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n#EmbutidosTopping\ndef get_response_embutidos():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EmbutidosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n#EspeciasTopping\ndef get_response_especias():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:EspeciasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n#FrutasTopping\ndef get_response_frutas():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:FrutasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n#QuesosTopping\ndef get_response_quesos():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:QuesosTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n#SalsasTopping\ndef get_response_salsas():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:SalsasTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n qres = sparql.query().convert()\n return qres\n\n\n#VegetalesTopping\ndef get_response_vegetales():\n sparql.setQuery('''\n PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n PREFIX saidi: <http://www.semanticweb.org/japor/ontologies/2021/5/PizzasLojanitas#>\n SELECT DISTINCT ?name \n WHERE { \n ?s rdfs:subClassOf saidi:VegetalesTopping .\n ?s rdfs:label ?name\n FILTER (lang(?name) = 'es')\n }\n\n ''')\n sparql.setReturnFormat(JSON)\n 
qres = sparql.query().convert()\n return qres\n\n\nif __name__ == '__main__':\n get_response_pizzas()\n get_response_carnes()\n get_response_embutidos()\n get_response_especias()\n get_response_frutas()\n get_response_quesos()\n get_response_salsas()\n get_response_vegetales()\n\n",
"step-ids": [
6,
7,
8,
10,
12
]
}
|
[
6,
7,
8,
10,
12
] |
from core.models import AnalyticsCacheSearchKeywordDay
from datetime import datetime, timedelta
def get_month():
return ["2017-10","2017-11","2017-12","2018-1","2018-2","2018-3","2018-4","2018-5","2018-6","2018-7","2018-8","2018-9","2018-10","2018-11", "2018-12"]
def run():
day = datetime.strptime("2017-10", "%Y-%m")
next_day = datetime.strptime("2017-11", "%Y-%m")
last_day = datetime.strptime("2018-11", "%Y-%m")
monthes = get_month()
result_keyword = {}
result_count = {}
dict_total = {}
idx = 1
while day < last_day:
keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(theday__gte=day, theday__lt=next_day)
date = str(day.year) + "-" + str(day.month)
result_keyword[date] = []
result_count[date] = []
dict_month = {}
for keyword in keyword_caches:
word = keyword.keyword.replace(" ", "")
if dict_total.get(word) is None:
dict_total[word] = 0
if dict_month.get(word) is None:
dict_month[word] = 0
dict_total[word] += keyword.total_count
dict_month[word] += keyword.total_count
sort_ids = sorted(dict_month, key=lambda x:dict_month[x], reverse=True)
cnt = 0
for id in sort_ids:
if cnt > 99:
break
result_keyword[date].append(id)
result_count[date].append(dict_month[id])
cnt+=1
day = datetime.strptime(monthes[idx], "%Y-%m")
next_day = datetime.strptime(monthes[idx+1], "%Y-%m")
idx+=1
sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)
total_rank_keyword = []
total_rank_count = []
for id in sorted_ids:
total_rank_keyword.append(id)
total_rank_count.append(dict_total[id])
with open("result.txt", "w") as f:
monthes = get_month()
for month in monthes:
if month == "2018-11" or month == "2018-12":
continue
print(month, file=f, end='\t')
print(" ", file=f, end='\t')
print("합산TOP100", file=f, end='\n')
for rank in range(0,100):
for month in monthes:
if month == "2018-11" or month == "2018-12":
continue
if result_keyword.get(month) is None:
print(" ", file=f, end='\t')
print(" ", file=f, end='\t')
continue
if len(result_keyword[month]) < rank+1:
print(" ", file=f, end='\t')
print(" ", file=f, end='\t')
continue
print(result_keyword[month][rank], file=f, end='\t')
print(result_count[month][rank], file=f, end='\t')
print(total_rank_keyword[rank], file=f, end='\t')
print(total_rank_count[rank], file=f, end='\n')
|
normal
|
{
"blob_id": "b048319a2ed182e70aa7f8a736ff02953577ec39",
"index": 2008,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef run():\n day = datetime.strptime('2017-10', '%Y-%m')\n next_day = datetime.strptime('2017-11', '%Y-%m')\n last_day = datetime.strptime('2018-11', '%Y-%m')\n monthes = get_month()\n result_keyword = {}\n result_count = {}\n dict_total = {}\n idx = 1\n while day < last_day:\n keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(\n theday__gte=day, theday__lt=next_day)\n date = str(day.year) + '-' + str(day.month)\n result_keyword[date] = []\n result_count[date] = []\n dict_month = {}\n for keyword in keyword_caches:\n word = keyword.keyword.replace(' ', '')\n if dict_total.get(word) is None:\n dict_total[word] = 0\n if dict_month.get(word) is None:\n dict_month[word] = 0\n dict_total[word] += keyword.total_count\n dict_month[word] += keyword.total_count\n sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True\n )\n cnt = 0\n for id in sort_ids:\n if cnt > 99:\n break\n result_keyword[date].append(id)\n result_count[date].append(dict_month[id])\n cnt += 1\n day = datetime.strptime(monthes[idx], '%Y-%m')\n next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')\n idx += 1\n sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)\n total_rank_keyword = []\n total_rank_count = []\n for id in sorted_ids:\n total_rank_keyword.append(id)\n total_rank_count.append(dict_total[id])\n with open('result.txt', 'w') as f:\n monthes = get_month()\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n print(month, file=f, end='\\t')\n print(' ', file=f, end='\\t')\n print('합산TOP100', file=f, end='\\n')\n for rank in range(0, 100):\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n if result_keyword.get(month) is None:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n if len(result_keyword[month]) < rank + 1:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n print(result_keyword[month][rank], file=f, end='\\t')\n print(result_count[month][rank], file=f, end='\\t')\n print(total_rank_keyword[rank], file=f, end='\\t')\n print(total_rank_count[rank], file=f, end='\\n')\n",
"step-3": "<mask token>\n\n\ndef get_month():\n return ['2017-10', '2017-11', '2017-12', '2018-1', '2018-2', '2018-3',\n '2018-4', '2018-5', '2018-6', '2018-7', '2018-8', '2018-9',\n '2018-10', '2018-11', '2018-12']\n\n\ndef run():\n day = datetime.strptime('2017-10', '%Y-%m')\n next_day = datetime.strptime('2017-11', '%Y-%m')\n last_day = datetime.strptime('2018-11', '%Y-%m')\n monthes = get_month()\n result_keyword = {}\n result_count = {}\n dict_total = {}\n idx = 1\n while day < last_day:\n keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(\n theday__gte=day, theday__lt=next_day)\n date = str(day.year) + '-' + str(day.month)\n result_keyword[date] = []\n result_count[date] = []\n dict_month = {}\n for keyword in keyword_caches:\n word = keyword.keyword.replace(' ', '')\n if dict_total.get(word) is None:\n dict_total[word] = 0\n if dict_month.get(word) is None:\n dict_month[word] = 0\n dict_total[word] += keyword.total_count\n dict_month[word] += keyword.total_count\n sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True\n )\n cnt = 0\n for id in sort_ids:\n if cnt > 99:\n break\n result_keyword[date].append(id)\n result_count[date].append(dict_month[id])\n cnt += 1\n day = datetime.strptime(monthes[idx], '%Y-%m')\n next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')\n idx += 1\n sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)\n total_rank_keyword = []\n total_rank_count = []\n for id in sorted_ids:\n total_rank_keyword.append(id)\n total_rank_count.append(dict_total[id])\n with open('result.txt', 'w') as f:\n monthes = get_month()\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n print(month, file=f, end='\\t')\n print(' ', file=f, end='\\t')\n print('합산TOP100', file=f, end='\\n')\n for rank in range(0, 100):\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n if result_keyword.get(month) is None:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n if len(result_keyword[month]) < rank + 1:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n print(result_keyword[month][rank], file=f, end='\\t')\n print(result_count[month][rank], file=f, end='\\t')\n print(total_rank_keyword[rank], file=f, end='\\t')\n print(total_rank_count[rank], file=f, end='\\n')\n",
"step-4": "from core.models import AnalyticsCacheSearchKeywordDay\nfrom datetime import datetime, timedelta\n\n\ndef get_month():\n return ['2017-10', '2017-11', '2017-12', '2018-1', '2018-2', '2018-3',\n '2018-4', '2018-5', '2018-6', '2018-7', '2018-8', '2018-9',\n '2018-10', '2018-11', '2018-12']\n\n\ndef run():\n day = datetime.strptime('2017-10', '%Y-%m')\n next_day = datetime.strptime('2017-11', '%Y-%m')\n last_day = datetime.strptime('2018-11', '%Y-%m')\n monthes = get_month()\n result_keyword = {}\n result_count = {}\n dict_total = {}\n idx = 1\n while day < last_day:\n keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(\n theday__gte=day, theday__lt=next_day)\n date = str(day.year) + '-' + str(day.month)\n result_keyword[date] = []\n result_count[date] = []\n dict_month = {}\n for keyword in keyword_caches:\n word = keyword.keyword.replace(' ', '')\n if dict_total.get(word) is None:\n dict_total[word] = 0\n if dict_month.get(word) is None:\n dict_month[word] = 0\n dict_total[word] += keyword.total_count\n dict_month[word] += keyword.total_count\n sort_ids = sorted(dict_month, key=lambda x: dict_month[x], reverse=True\n )\n cnt = 0\n for id in sort_ids:\n if cnt > 99:\n break\n result_keyword[date].append(id)\n result_count[date].append(dict_month[id])\n cnt += 1\n day = datetime.strptime(monthes[idx], '%Y-%m')\n next_day = datetime.strptime(monthes[idx + 1], '%Y-%m')\n idx += 1\n sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)\n total_rank_keyword = []\n total_rank_count = []\n for id in sorted_ids:\n total_rank_keyword.append(id)\n total_rank_count.append(dict_total[id])\n with open('result.txt', 'w') as f:\n monthes = get_month()\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n print(month, file=f, end='\\t')\n print(' ', file=f, end='\\t')\n print('합산TOP100', file=f, end='\\n')\n for rank in range(0, 100):\n for month in monthes:\n if month == '2018-11' or month == '2018-12':\n continue\n if result_keyword.get(month) is None:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n if len(result_keyword[month]) < rank + 1:\n print(' ', file=f, end='\\t')\n print(' ', file=f, end='\\t')\n continue\n print(result_keyword[month][rank], file=f, end='\\t')\n print(result_count[month][rank], file=f, end='\\t')\n print(total_rank_keyword[rank], file=f, end='\\t')\n print(total_rank_count[rank], file=f, end='\\n')\n",
"step-5": "from core.models import AnalyticsCacheSearchKeywordDay\nfrom datetime import datetime, timedelta\n\n\ndef get_month():\n\n return [\"2017-10\",\"2017-11\",\"2017-12\",\"2018-1\",\"2018-2\",\"2018-3\",\"2018-4\",\"2018-5\",\"2018-6\",\"2018-7\",\"2018-8\",\"2018-9\",\"2018-10\",\"2018-11\", \"2018-12\"]\n\n\ndef run():\n\n day = datetime.strptime(\"2017-10\", \"%Y-%m\")\n next_day = datetime.strptime(\"2017-11\", \"%Y-%m\")\n last_day = datetime.strptime(\"2018-11\", \"%Y-%m\")\n monthes = get_month()\n result_keyword = {}\n result_count = {}\n dict_total = {}\n idx = 1\n while day < last_day:\n keyword_caches = AnalyticsCacheSearchKeywordDay.objects.filter(theday__gte=day, theday__lt=next_day)\n date = str(day.year) + \"-\" + str(day.month)\n result_keyword[date] = []\n result_count[date] = []\n dict_month = {}\n for keyword in keyword_caches:\n\n word = keyword.keyword.replace(\" \", \"\")\n if dict_total.get(word) is None:\n dict_total[word] = 0\n if dict_month.get(word) is None:\n dict_month[word] = 0\n dict_total[word] += keyword.total_count\n dict_month[word] += keyword.total_count\n\n sort_ids = sorted(dict_month, key=lambda x:dict_month[x], reverse=True)\n cnt = 0\n for id in sort_ids:\n if cnt > 99:\n break\n result_keyword[date].append(id)\n result_count[date].append(dict_month[id])\n cnt+=1\n\n day = datetime.strptime(monthes[idx], \"%Y-%m\")\n next_day = datetime.strptime(monthes[idx+1], \"%Y-%m\")\n idx+=1\n\n sorted_ids = sorted(dict_total, key=lambda x: dict_total[x], reverse=True)\n total_rank_keyword = []\n total_rank_count = []\n for id in sorted_ids:\n total_rank_keyword.append(id)\n total_rank_count.append(dict_total[id])\n\n with open(\"result.txt\", \"w\") as f:\n monthes = get_month()\n for month in monthes:\n if month == \"2018-11\" or month == \"2018-12\":\n continue\n print(month, file=f, end='\\t')\n print(\" \", file=f, end='\\t')\n print(\"합산TOP100\", file=f, end='\\n')\n for rank in range(0,100):\n for month in monthes:\n if month == \"2018-11\" or month == \"2018-12\":\n continue\n if result_keyword.get(month) is None:\n print(\" \", file=f, end='\\t')\n print(\" \", file=f, end='\\t')\n continue\n if len(result_keyword[month]) < rank+1:\n print(\" \", file=f, end='\\t')\n print(\" \", file=f, end='\\t')\n continue\n print(result_keyword[month][rank], file=f, end='\\t')\n print(result_count[month][rank], file=f, end='\\t')\n print(total_rank_keyword[rank], file=f, end='\\t')\n print(total_rank_count[rank], file=f, end='\\n')",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.http import HttpResponse
from django.shortcuts import render
from .models import game
def index(request):
all_games = game.objects.all()
context = {
'all_games' : all_games
}
return render(request,'game/index.html',context)
def gameview(response):
return HttpResponse("<h1>Ludo King</h1>")
|
normal
|
{
"blob_id": "6623ac194e380c9554d72a1b20bf860b958dda97",
"index": 5961,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n",
"step-4": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\n\ndef index(request):\n all_games = game.objects.all()\n context = {'all_games': all_games}\n return render(request, 'game/index.html', context)\n\n\ndef gameview(response):\n return HttpResponse('<h1>Ludo King</h1>')\n",
"step-5": "from django.http import HttpResponse\nfrom django.shortcuts import render\nfrom .models import game\n\ndef index(request):\n all_games = game.objects.all()\n context = {\n 'all_games' : all_games\n }\n return render(request,'game/index.html',context)\n\ndef gameview(response):\n return HttpResponse(\"<h1>Ludo King</h1>\")\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Standard library
# Third party library
# Local library
from warehouse.server import run_server
from warehouse.server.config import log
if __name__ == "__main__":
log.initialize_logs()
run_server()
|
normal
|
{
"blob_id": "8c8b5c1ff749a8563788b8d5be5332e273275be3",
"index": 6450,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n log.initialize_logs()\n run_server()\n",
"step-3": "from warehouse.server import run_server\nfrom warehouse.server.config import log\nif __name__ == '__main__':\n log.initialize_logs()\n run_server()\n",
"step-4": "# Standard library\n# Third party library\n# Local library\nfrom warehouse.server import run_server\nfrom warehouse.server.config import log\n\n\nif __name__ == \"__main__\":\n log.initialize_logs()\n run_server()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from typing import Any, Dict, List
import numpy as np
from kedro.io import AbstractDataSet
from msrest.exceptions import HttpOperationError
from azureml.core import Workspace, Datastore
from azureml.data.data_reference import DataReference
class AZblob_datastore_data(AbstractDataSet):
    """``AZblob_datastore_data`` loads / saves data between a local path and an Azure Blob storage datastore registered in an Azure ML workspace.
    Example:
    ::
        >>> AZblob_datastore_data(container_path='raw/data', local_path='data/local')
    """
def __init__(self,
container_path: str,
local_path : str,
credentials: Dict[str, Any] = None):
"""Creates a new instance of ImageDataSet to load / save image data at the given filepath.
Args:
filepath: The location of the image file to load / save data.
"""
self._container_path = container_path
self._local_path = local_path
self._credentials = credentials
def _load(self) -> np.ndarray:
"""Loads data from the image file.
Returns:
Data from the image file as a numpy array.
"""
# Initialis Workspace
ws = Workspace.from_config()
blob_datastore_name = self._credentials['storage_name']
account_name = self._credentials['storage_name'] # Storage account name
container_name = self._credentials['container_name'] # Name of Azure blob container
account_key = self._credentials['key'] # Storage account key
# Register a new datastore
        try:
            blob_datastore = Datastore.get(ws, blob_datastore_name)
            print("Found Blob Datastore with name: %s" % blob_datastore_name)
        except HttpOperationError:
            blob_datastore = Datastore.register_azure_blob_container(workspace=ws,
                                                                     datastore_name=blob_datastore_name,
                                                                     container_name=container_name,
                                                                     account_name=account_name,
                                                                     account_key=account_key)

        # Download the blobs under the container prefix into the local path
        blob_datastore.download(target_path=self._local_path,
                                prefix=self._container_path,
                                show_progress=False)
def _save(self, data: np.ndarray) -> None:
"""Saves image data to the specified filepath"""
...
    def _describe(self) -> Dict[str, Any]:
        """Returns a dict that describes the attributes of the dataset"""
        return dict(container_path=self._container_path, local_path=self._local_path)
|
normal
|
{
"blob_id": "eb981a2d7f0ff5e6cc4a4a76f269c93c547965ba",
"index": 715,
"step-1": "from typing import Any, Dict, List\n\nimport numpy as np\n\nfrom kedro.io import AbstractDataSet\nfrom msrest.exceptions import HttpOperationError\nfrom azureml.core import Workspace, Datastore\nfrom azureml.data.data_reference import DataReference\n\nclass AZblob_datastore_data(AbstractDataSet):\n \"\"\"``ImageDataSet`` loads / save image data from a given filepath as `numpy` array using Pillow.\n\n Example:\n ::\n\n >>> ImageDataSet(filepath='/img/file/path.png')\n \"\"\"\n\n def __init__(self,\n container_path: str,\n local_path : str,\n credentials: Dict[str, Any] = None):\n \"\"\"Creates a new instance of ImageDataSet to load / save image data at the given filepath.\n\n Args:\n filepath: The location of the image file to load / save data.\n \"\"\"\n self._container_path = container_path\n self._local_path = local_path\n self._credentials = credentials\n\n def _load(self) -> np.ndarray:\n \"\"\"Loads data from the image file.\n\n Returns:\n Data from the image file as a numpy array.\n \"\"\"\n # Initialis Workspace\n\n ws = Workspace.from_config()\n\n blob_datastore_name = self._credentials['storage_name']\n account_name = self._credentials['storage_name'] # Storage account name\n container_name = self._credentials['container_name'] # Name of Azure blob container\n account_key = self._credentials['key'] # Storage account key\n\n # Register a new datastore\n try:\n blob_datastore = blob_datastore = Datastore.get(ws, blob_datastore_name)\n print(\"Found Blob Datastore with name: %s\" % blob_datastore_name)\n\n except HttpOperationError:\n blob_datastore = Datastore.register_azure_blob_container(workspace = ws, \n datastore_name = blob_datastore_name, \n container_name = container_name,\n account_name = account_name,\n blob_datastore.download(target_path=self._local_path,\n prefix=self._container_path,\n show_progress=False) \n ...\n\n def _save(self, data: np.ndarray) -> None:\n \"\"\"Saves image data to the specified filepath\"\"\"\n ...\n\n def _describe(self) -> Dict[str, Any]:\n \n \"\"\"Returns a dict that describes the attributes of the dataset\"\"\"",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# -*- coding:utf-8 -*-
from spider.driver.base.driver import Driver
from spider.driver.base.mysql import Mysql
import time
from pyquery import PyQuery
from spider.driver.base.field import Field,FieldName,Fieldlist,FieldType
from spider.driver.base.page import Page
from spider.driver.base.listcssselector import ListCssSelector
from spider.driver.base.mongodb import Mongodb
from spider.driver.base.tabsetup import TabSetup
fl_weixin1 = Fieldlist(
Field(fieldname='public_name', css_selector='div > div.txt-box > p.tit > a', regex=r'[^\u4e00-\u9fa5]*'),
)
fl_weixin2 = Fieldlist(
Field(fieldname='article_name', css_selector='div > div > h4'),
Field(fieldname='article_time', css_selector='div > div > p.weui_media_extra_info'),
)
page_weixin_1 = Page(name='微信公众号列表页面', fieldlist=fl_weixin1, listcssselector=ListCssSelector(list_css_selector='#main > div.news-box > ul > li'))
page_weixin_2 = Page(name='微信公众号文章列表页面', fieldlist=fl_weixin2, tabsetup=TabSetup(click_css_selector='div > div.txt-box > p.tit > a'), listcssselector=ListCssSelector(list_css_selector='#history > div'))
class WeixinSpider(Driver):
def __init__(self,isheadless=False,ismobile=False,isvirtualdisplay=False,spider_id='',name=''):
Driver.__init__(self, log_file_name=spider_id, ismobile=ismobile, isvirtualdisplay=isvirtualdisplay,
isheadless=isheadless)
self.name = name
self.debug_log(name=name)
def get_article(self, data_list=[]):
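        # Click through every article in the account's history list, then navigate back.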
article_list = self.until_presence_of_all_elements_located_by_css_selector(css_selector=page_weixin_2.listcssselector.list_css_selector)
for i in range(1, len(article_list)+1):
self.until_scroll_to_center_click_by_css_selector(css_selector='%s:nth-child(%s)'%(page_weixin_2.listcssselector.list_css_selector,i))
time.sleep(3)
self.driver.back()
def run_spider(self):
for public in Mysql().query_data(table='weixin_public', field='public_name')[:1]:
self.fast_get_page(url='http://weixin.sogou.com/', min_time_to_wait=15,max_time_to_wait=30)
self.until_send_text_by_css_selector(css_selector='#query', text=public[0])
time.sleep(3)
self.fast_enter_page_by_css_selector(css_selector='#query')
time.sleep(2)
self.fast_click_same_page_by_css_selector(click_css_selector='#scroll-header > form > div > input.swz2')
public_name_list = self.from_page_get_data_list(page=page_weixin_1)
article_name_list = self.from_page_add_data_list_to_data_list(page=page_weixin_2, pre_page=page_weixin_1,data_list=public_name_list, extra_page_func=self.get_article)
# self.fast_click_page_by_css_selector(ele=item, click_css_selector='div > div.txt-box > p.tit > a')
# self.driver.switch_to.window(self.driver.window_handles[-1])
# shop_data_list = self.from_page_get_data_list(page=page_weixin_1)
# self.driver.close()
# self.driver.switch_to.window(self.driver.window_handles[-1])
|
normal
|
{
"blob_id": "1a7a28a2264ed0204184ab1dd273b0b114657fa7",
"index": 3004,
"step-1": "<mask token>\n\n\nclass WeixinSpider(Driver):\n <mask token>\n\n def get_article(self, data_list=[]):\n article_list = (self.\n until_presence_of_all_elements_located_by_css_selector(\n css_selector=page_weixin_2.listcssselector.list_css_selector))\n for i in range(1, len(article_list) + 1):\n self.until_scroll_to_center_click_by_css_selector(css_selector=\n '%s:nth-child(%s)' % (page_weixin_2.listcssselector.\n list_css_selector, i))\n time.sleep(3)\n self.driver.back()\n\n def run_spider(self):\n for public in Mysql().query_data(table='weixin_public', field=\n 'public_name')[:1]:\n self.fast_get_page(url='http://weixin.sogou.com/',\n min_time_to_wait=15, max_time_to_wait=30)\n self.until_send_text_by_css_selector(css_selector='#query',\n text=public[0])\n time.sleep(3)\n self.fast_enter_page_by_css_selector(css_selector='#query')\n time.sleep(2)\n self.fast_click_same_page_by_css_selector(click_css_selector=\n '#scroll-header > form > div > input.swz2')\n public_name_list = self.from_page_get_data_list(page=page_weixin_1)\n article_name_list = self.from_page_add_data_list_to_data_list(page\n =page_weixin_2, pre_page=page_weixin_1, data_list=\n public_name_list, extra_page_func=self.get_article)\n",
"step-2": "<mask token>\n\n\nclass WeixinSpider(Driver):\n\n def __init__(self, isheadless=False, ismobile=False, isvirtualdisplay=\n False, spider_id='', name=''):\n Driver.__init__(self, log_file_name=spider_id, ismobile=ismobile,\n isvirtualdisplay=isvirtualdisplay, isheadless=isheadless)\n self.name = name\n self.debug_log(name=name)\n\n def get_article(self, data_list=[]):\n article_list = (self.\n until_presence_of_all_elements_located_by_css_selector(\n css_selector=page_weixin_2.listcssselector.list_css_selector))\n for i in range(1, len(article_list) + 1):\n self.until_scroll_to_center_click_by_css_selector(css_selector=\n '%s:nth-child(%s)' % (page_weixin_2.listcssselector.\n list_css_selector, i))\n time.sleep(3)\n self.driver.back()\n\n def run_spider(self):\n for public in Mysql().query_data(table='weixin_public', field=\n 'public_name')[:1]:\n self.fast_get_page(url='http://weixin.sogou.com/',\n min_time_to_wait=15, max_time_to_wait=30)\n self.until_send_text_by_css_selector(css_selector='#query',\n text=public[0])\n time.sleep(3)\n self.fast_enter_page_by_css_selector(css_selector='#query')\n time.sleep(2)\n self.fast_click_same_page_by_css_selector(click_css_selector=\n '#scroll-header > form > div > input.swz2')\n public_name_list = self.from_page_get_data_list(page=page_weixin_1)\n article_name_list = self.from_page_add_data_list_to_data_list(page\n =page_weixin_2, pre_page=page_weixin_1, data_list=\n public_name_list, extra_page_func=self.get_article)\n",
"step-3": "<mask token>\nfl_weixin1 = Fieldlist(Field(fieldname='public_name', css_selector=\n 'div > div.txt-box > p.tit > a', regex='[^\\\\u4e00-\\\\u9fa5]*'))\nfl_weixin2 = Fieldlist(Field(fieldname='article_name', css_selector=\n 'div > div > h4'), Field(fieldname='article_time', css_selector=\n 'div > div > p.weui_media_extra_info'))\npage_weixin_1 = Page(name='微信公众号列表页面', fieldlist=fl_weixin1,\n listcssselector=ListCssSelector(list_css_selector=\n '#main > div.news-box > ul > li'))\npage_weixin_2 = Page(name='微信公众号文章列表页面', fieldlist=fl_weixin2, tabsetup=\n TabSetup(click_css_selector='div > div.txt-box > p.tit > a'),\n listcssselector=ListCssSelector(list_css_selector='#history > div'))\n\n\nclass WeixinSpider(Driver):\n\n def __init__(self, isheadless=False, ismobile=False, isvirtualdisplay=\n False, spider_id='', name=''):\n Driver.__init__(self, log_file_name=spider_id, ismobile=ismobile,\n isvirtualdisplay=isvirtualdisplay, isheadless=isheadless)\n self.name = name\n self.debug_log(name=name)\n\n def get_article(self, data_list=[]):\n article_list = (self.\n until_presence_of_all_elements_located_by_css_selector(\n css_selector=page_weixin_2.listcssselector.list_css_selector))\n for i in range(1, len(article_list) + 1):\n self.until_scroll_to_center_click_by_css_selector(css_selector=\n '%s:nth-child(%s)' % (page_weixin_2.listcssselector.\n list_css_selector, i))\n time.sleep(3)\n self.driver.back()\n\n def run_spider(self):\n for public in Mysql().query_data(table='weixin_public', field=\n 'public_name')[:1]:\n self.fast_get_page(url='http://weixin.sogou.com/',\n min_time_to_wait=15, max_time_to_wait=30)\n self.until_send_text_by_css_selector(css_selector='#query',\n text=public[0])\n time.sleep(3)\n self.fast_enter_page_by_css_selector(css_selector='#query')\n time.sleep(2)\n self.fast_click_same_page_by_css_selector(click_css_selector=\n '#scroll-header > form > div > input.swz2')\n public_name_list = self.from_page_get_data_list(page=page_weixin_1)\n article_name_list = self.from_page_add_data_list_to_data_list(page\n =page_weixin_2, pre_page=page_weixin_1, data_list=\n public_name_list, extra_page_func=self.get_article)\n",
"step-4": "from spider.driver.base.driver import Driver\nfrom spider.driver.base.mysql import Mysql\nimport time\nfrom pyquery import PyQuery\nfrom spider.driver.base.field import Field, FieldName, Fieldlist, FieldType\nfrom spider.driver.base.page import Page\nfrom spider.driver.base.listcssselector import ListCssSelector\nfrom spider.driver.base.mongodb import Mongodb\nfrom spider.driver.base.tabsetup import TabSetup\nfl_weixin1 = Fieldlist(Field(fieldname='public_name', css_selector=\n 'div > div.txt-box > p.tit > a', regex='[^\\\\u4e00-\\\\u9fa5]*'))\nfl_weixin2 = Fieldlist(Field(fieldname='article_name', css_selector=\n 'div > div > h4'), Field(fieldname='article_time', css_selector=\n 'div > div > p.weui_media_extra_info'))\npage_weixin_1 = Page(name='微信公众号列表页面', fieldlist=fl_weixin1,\n listcssselector=ListCssSelector(list_css_selector=\n '#main > div.news-box > ul > li'))\npage_weixin_2 = Page(name='微信公众号文章列表页面', fieldlist=fl_weixin2, tabsetup=\n TabSetup(click_css_selector='div > div.txt-box > p.tit > a'),\n listcssselector=ListCssSelector(list_css_selector='#history > div'))\n\n\nclass WeixinSpider(Driver):\n\n def __init__(self, isheadless=False, ismobile=False, isvirtualdisplay=\n False, spider_id='', name=''):\n Driver.__init__(self, log_file_name=spider_id, ismobile=ismobile,\n isvirtualdisplay=isvirtualdisplay, isheadless=isheadless)\n self.name = name\n self.debug_log(name=name)\n\n def get_article(self, data_list=[]):\n article_list = (self.\n until_presence_of_all_elements_located_by_css_selector(\n css_selector=page_weixin_2.listcssselector.list_css_selector))\n for i in range(1, len(article_list) + 1):\n self.until_scroll_to_center_click_by_css_selector(css_selector=\n '%s:nth-child(%s)' % (page_weixin_2.listcssselector.\n list_css_selector, i))\n time.sleep(3)\n self.driver.back()\n\n def run_spider(self):\n for public in Mysql().query_data(table='weixin_public', field=\n 'public_name')[:1]:\n self.fast_get_page(url='http://weixin.sogou.com/',\n min_time_to_wait=15, max_time_to_wait=30)\n self.until_send_text_by_css_selector(css_selector='#query',\n text=public[0])\n time.sleep(3)\n self.fast_enter_page_by_css_selector(css_selector='#query')\n time.sleep(2)\n self.fast_click_same_page_by_css_selector(click_css_selector=\n '#scroll-header > form > div > input.swz2')\n public_name_list = self.from_page_get_data_list(page=page_weixin_1)\n article_name_list = self.from_page_add_data_list_to_data_list(page\n =page_weixin_2, pre_page=page_weixin_1, data_list=\n public_name_list, extra_page_func=self.get_article)\n",
"step-5": "# -*- coding:utf-8 -*-\nfrom spider.driver.base.driver import Driver\nfrom spider.driver.base.mysql import Mysql\nimport time\nfrom pyquery import PyQuery\nfrom spider.driver.base.field import Field,FieldName,Fieldlist,FieldType\nfrom spider.driver.base.page import Page\nfrom spider.driver.base.listcssselector import ListCssSelector\nfrom spider.driver.base.mongodb import Mongodb\nfrom spider.driver.base.tabsetup import TabSetup\n\nfl_weixin1 = Fieldlist(\n Field(fieldname='public_name', css_selector='div > div.txt-box > p.tit > a', regex=r'[^\\u4e00-\\u9fa5]*'),\n)\n\nfl_weixin2 = Fieldlist(\n Field(fieldname='article_name', css_selector='div > div > h4'),\n Field(fieldname='article_time', css_selector='div > div > p.weui_media_extra_info'),\n)\n\npage_weixin_1 = Page(name='微信公众号列表页面', fieldlist=fl_weixin1, listcssselector=ListCssSelector(list_css_selector='#main > div.news-box > ul > li'))\n\npage_weixin_2 = Page(name='微信公众号文章列表页面', fieldlist=fl_weixin2, tabsetup=TabSetup(click_css_selector='div > div.txt-box > p.tit > a'), listcssselector=ListCssSelector(list_css_selector='#history > div'))\n\nclass WeixinSpider(Driver):\n\n def __init__(self,isheadless=False,ismobile=False,isvirtualdisplay=False,spider_id='',name=''):\n Driver.__init__(self, log_file_name=spider_id, ismobile=ismobile, isvirtualdisplay=isvirtualdisplay,\n isheadless=isheadless)\n self.name = name\n self.debug_log(name=name)\n\n def get_article(self, data_list=[]):\n article_list = self.until_presence_of_all_elements_located_by_css_selector(css_selector=page_weixin_2.listcssselector.list_css_selector)\n for i in range(1, len(article_list)+1):\n self.until_scroll_to_center_click_by_css_selector(css_selector='%s:nth-child(%s)'%(page_weixin_2.listcssselector.list_css_selector,i))\n time.sleep(3)\n self.driver.back()\n\n def run_spider(self):\n for public in Mysql().query_data(table='weixin_public', field='public_name')[:1]:\n self.fast_get_page(url='http://weixin.sogou.com/', min_time_to_wait=15,max_time_to_wait=30)\n self.until_send_text_by_css_selector(css_selector='#query', text=public[0])\n time.sleep(3)\n self.fast_enter_page_by_css_selector(css_selector='#query')\n time.sleep(2)\n self.fast_click_same_page_by_css_selector(click_css_selector='#scroll-header > form > div > input.swz2')\n public_name_list = self.from_page_get_data_list(page=page_weixin_1)\n article_name_list = self.from_page_add_data_list_to_data_list(page=page_weixin_2, pre_page=page_weixin_1,data_list=public_name_list, extra_page_func=self.get_article)\n # self.fast_click_page_by_css_selector(ele=item, click_css_selector='div > div.txt-box > p.tit > a')\n # self.driver.switch_to.window(self.driver.window_handles[-1])\n # shop_data_list = self.from_page_get_data_list(page=page_weixin_1)\n # self.driver.close()\n # self.driver.switch_to.window(self.driver.window_handles[-1])",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from marshmallow import ValidationError
from werkzeug.exceptions import HTTPException
from flask_jwt_extended.exceptions import JWTExtendedException
from memedata.util import mk_errors
from memedata import config
def jwt_error_handler(error):
code = 401
messages = list(getattr(error, 'args', []))
return mk_errors(code, messages)
def http_error_handler(error):
resp = error.response
if resp is None:
code = error.code
messages = [error.description]
else:
code = getattr(resp, 'status_code', 500)
json = resp.get_json()
if 'errors' in json and json['errors']:
messages = [e['message'] for e in json['errors'] if 'message' in e]
else:
messages = [str(resp.status)]
return mk_errors(code, messages)
def validation_error_handler(error):
code = getattr(error, 'status_code', 500)
messages = getattr(error, 'messages', [])
return mk_errors(code, messages)
def generic_error_handler(error):
code = getattr(error, 'status_code', 500)
if config.debug:
messages = [str(error)]
else:
messages = ['something went wrong!']
return mk_errors(code, messages)
def error_handler(error):
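    # Dispatch on the exception type; if a handler itself fails, fall back to a plain 500.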
try:
if isinstance(error, JWTExtendedException):
return jwt_error_handler(error)
elif isinstance(error, HTTPException):
return http_error_handler(error)
elif isinstance(error, ValidationError):
return validation_error_handler(error)
else:
return generic_error_handler(error)
except:
return mk_errors(500, 'something went wrong!')
def register_handlers(app):
app.errorhandler(Exception)(error_handler)
app.errorhandler(HTTPException)(error_handler)
app.handle_user_exception = error_handler
|
normal
|
{
"blob_id": "e1da3255668999c3b77aa8c9332b197a9203478e",
"index": 8992,
"step-1": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\n<mask token>\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\n<mask token>\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-3": "<mask token>\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\ndef http_error_handler(error):\n resp = error.response\n if resp is None:\n code = error.code\n messages = [error.description]\n else:\n code = getattr(resp, 'status_code', 500)\n json = resp.get_json()\n if 'errors' in json and json['errors']:\n messages = [e['message'] for e in json['errors'] if 'message' in e]\n else:\n messages = [str(resp.status)]\n return mk_errors(code, messages)\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-4": "from marshmallow import ValidationError\nfrom werkzeug.exceptions import HTTPException\nfrom flask_jwt_extended.exceptions import JWTExtendedException\nfrom memedata.util import mk_errors\nfrom memedata import config\n\n\ndef jwt_error_handler(error):\n code = 401\n messages = list(getattr(error, 'args', []))\n return mk_errors(code, messages)\n\n\ndef http_error_handler(error):\n resp = error.response\n if resp is None:\n code = error.code\n messages = [error.description]\n else:\n code = getattr(resp, 'status_code', 500)\n json = resp.get_json()\n if 'errors' in json and json['errors']:\n messages = [e['message'] for e in json['errors'] if 'message' in e]\n else:\n messages = [str(resp.status)]\n return mk_errors(code, messages)\n\n\ndef validation_error_handler(error):\n code = getattr(error, 'status_code', 500)\n messages = getattr(error, 'messages', [])\n return mk_errors(code, messages)\n\n\ndef generic_error_handler(error):\n code = getattr(error, 'status_code', 500)\n if config.debug:\n messages = [str(error)]\n else:\n messages = ['something went wrong!']\n return mk_errors(code, messages)\n\n\ndef error_handler(error):\n try:\n if isinstance(error, JWTExtendedException):\n return jwt_error_handler(error)\n elif isinstance(error, HTTPException):\n return http_error_handler(error)\n elif isinstance(error, ValidationError):\n return validation_error_handler(error)\n else:\n return generic_error_handler(error)\n except:\n return mk_errors(500, 'something went wrong!')\n\n\ndef register_handlers(app):\n app.errorhandler(Exception)(error_handler)\n app.errorhandler(HTTPException)(error_handler)\n app.handle_user_exception = error_handler\n",
"step-5": null,
"step-ids": [
4,
5,
6,
7
]
}
|
[
4,
5,
6,
7
] |
from typing import Dict, List
from .power_bi_querier import PowerBiQuerier
class DeathsByEthnicity(PowerBiQuerier):
def __init__(self) ->None:
self.source = 'd'
self.name = 'deaths by race'
self.property = 'race'
super().__init__()
def _parse_data(self, response_json: Dict[str, List]) ->Dict[str, int]:
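        # Trim whitespace from the ethnicity labels and map each to its count.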
results = super()._parse_data(response_json)
return {ethnicity.strip(): count for ethnicity, count in results}
|
normal
|
{
"blob_id": "d975b74370acc72101f808e70bef64cee39a5ab8",
"index": 6204,
"step-1": "<mask token>\n\n\nclass DeathsByEthnicity(PowerBiQuerier):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DeathsByEthnicity(PowerBiQuerier):\n <mask token>\n\n def _parse_data(self, response_json: Dict[str, List]) ->Dict[str, int]:\n results = super()._parse_data(response_json)\n return {ethnicity.strip(): count for ethnicity, count in results}\n",
"step-3": "<mask token>\n\n\nclass DeathsByEthnicity(PowerBiQuerier):\n\n def __init__(self) ->None:\n self.source = 'd'\n self.name = 'deaths by race'\n self.property = 'race'\n super().__init__()\n\n def _parse_data(self, response_json: Dict[str, List]) ->Dict[str, int]:\n results = super()._parse_data(response_json)\n return {ethnicity.strip(): count for ethnicity, count in results}\n",
"step-4": "from typing import Dict, List\nfrom .power_bi_querier import PowerBiQuerier\n\n\nclass DeathsByEthnicity(PowerBiQuerier):\n\n def __init__(self) ->None:\n self.source = 'd'\n self.name = 'deaths by race'\n self.property = 'race'\n super().__init__()\n\n def _parse_data(self, response_json: Dict[str, List]) ->Dict[str, int]:\n results = super()._parse_data(response_json)\n return {ethnicity.strip(): count for ethnicity, count in results}\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
import PyInstaller.__main__
import os
import shutil
# Paths
basePath = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.pardir))
srcPath = os.path.join(basePath, 'src')
outPath = os.path.join(basePath, 'out')
workPath = os.path.join(outPath, 'work')
# Bundle
PyInstaller.__main__.run([
'--clean',
'--onefile',
'--workpath', workPath,
'--distpath', outPath,
'--hidden-import', 'win32timezone',
os.path.join(srcPath, 'service.py'),
os.path.join(srcPath, 'bridge.py'),
])
# Copy config files
shutil.copy2(os.path.join(srcPath, 'bridge.cfg'), outPath)
shutil.copy2(os.path.join(srcPath, 'groups.cfg'), outPath)
# Remove build artifacts
shutil.rmtree(workPath)
|
normal
|
{
"blob_id": "16a95573c4fccc10bdc5e37b307d0c85714b328c",
"index": 3548,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nPyInstaller.__main__.run(['--clean', '--onefile', '--workpath', workPath,\n '--distpath', outPath, '--hidden-import', 'win32timezone', os.path.join\n (srcPath, 'service.py'), os.path.join(srcPath, 'bridge.py')])\nshutil.copy2(os.path.join(srcPath, 'bridge.cfg'), outPath)\nshutil.copy2(os.path.join(srcPath, 'groups.cfg'), outPath)\nshutil.rmtree(workPath)\n",
"step-3": "<mask token>\nbasePath = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path\n .pardir))\nsrcPath = os.path.join(basePath, 'src')\noutPath = os.path.join(basePath, 'out')\nworkPath = os.path.join(outPath, 'work')\nPyInstaller.__main__.run(['--clean', '--onefile', '--workpath', workPath,\n '--distpath', outPath, '--hidden-import', 'win32timezone', os.path.join\n (srcPath, 'service.py'), os.path.join(srcPath, 'bridge.py')])\nshutil.copy2(os.path.join(srcPath, 'bridge.cfg'), outPath)\nshutil.copy2(os.path.join(srcPath, 'groups.cfg'), outPath)\nshutil.rmtree(workPath)\n",
"step-4": "import PyInstaller.__main__\nimport os\nimport shutil\nbasePath = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path\n .pardir))\nsrcPath = os.path.join(basePath, 'src')\noutPath = os.path.join(basePath, 'out')\nworkPath = os.path.join(outPath, 'work')\nPyInstaller.__main__.run(['--clean', '--onefile', '--workpath', workPath,\n '--distpath', outPath, '--hidden-import', 'win32timezone', os.path.join\n (srcPath, 'service.py'), os.path.join(srcPath, 'bridge.py')])\nshutil.copy2(os.path.join(srcPath, 'bridge.cfg'), outPath)\nshutil.copy2(os.path.join(srcPath, 'groups.cfg'), outPath)\nshutil.rmtree(workPath)\n",
"step-5": "import PyInstaller.__main__\nimport os\nimport shutil\n\n# Paths\nbasePath = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.pardir))\nsrcPath = os.path.join(basePath, 'src')\noutPath = os.path.join(basePath, 'out')\nworkPath = os.path.join(outPath, 'work')\n\n# Bundle\nPyInstaller.__main__.run([\n '--clean',\n '--onefile',\n '--workpath', workPath,\n '--distpath', outPath,\n '--hidden-import', 'win32timezone',\n os.path.join(srcPath, 'service.py'),\n os.path.join(srcPath, 'bridge.py'),\n])\n\n# Copy config files\nshutil.copy2(os.path.join(srcPath, 'bridge.cfg'), outPath)\nshutil.copy2(os.path.join(srcPath, 'groups.cfg'), outPath)\n\n# Remove build artifacts\nshutil.rmtree(workPath)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class Solution:
def isUgly(self, num):
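        # An ugly number has no prime factors other than 2, 3 and 5; 0 is not ugly.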
if num == 0:
return False
for n in [2, 3, 5]:
while num % n == 0:
num = num / n
return num == 1
a = Solution()
print(a.isUgly(14))
print(a.isUgly(8))
print(a.isUgly(6))
print(a.isUgly(0))
|
normal
|
{
"blob_id": "d39cc2dbbc83869e559f8355ceba5cf420adea5e",
"index": 1662,
"step-1": "class Solution:\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Solution:\n\n def isUgly(self, num):\n if num == 0:\n return False\n for n in [2, 3, 5]:\n while num % n == 0:\n num = num / n\n return num == 1\n\n\n<mask token>\n",
"step-3": "class Solution:\n\n def isUgly(self, num):\n if num == 0:\n return False\n for n in [2, 3, 5]:\n while num % n == 0:\n num = num / n\n return num == 1\n\n\n<mask token>\nprint(a.isUgly(14))\nprint(a.isUgly(8))\nprint(a.isUgly(6))\nprint(a.isUgly(0))\n",
"step-4": "class Solution:\n\n def isUgly(self, num):\n if num == 0:\n return False\n for n in [2, 3, 5]:\n while num % n == 0:\n num = num / n\n return num == 1\n\n\na = Solution()\nprint(a.isUgly(14))\nprint(a.isUgly(8))\nprint(a.isUgly(6))\nprint(a.isUgly(0))\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext_lazy as _
from django import forms
from programs.models import *
from programs.forms import CustomUserCreationForm, CustomUserChangeForm
import pdb
class ProgramAdmin(admin.ModelAdmin):
list_display = ('description','get_university')
search_fields=('description','department__university__code')
list_filter = ('department__university',)
def get_university(self,obj):
return obj.department.university
def save_model(self,request,obj,form,change):
obj.code = obj.description.replace(' ','_')
obj.save()
get_university.short_description = 'University'
def change_view(self,request,object_id,extra_content=None):
self.exclude = ('',)
return super(ProgramAdmin,self).change_view(request,object_id)
def add_view(self,request,extra_content=None):
self.exclude = ('code',)
return super(ProgramAdmin,self).add_view(request)
class ProgramInline(admin.TabularInline):
model = Program
extra = 0
fields = ('description',)
class DepartmentAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields':['description','university','tenured','nonTenured']}),
]
inlines = [ProgramInline]
search_fields = ('university__description','description')
list_filter = ('description','university')
def save_model(self,request,obj,form,change):
if obj.code == '':
obj.code = obj.name.replace(' ','_')
obj.save()
class DepartmentInline(admin.TabularInline):
model = Department
extra = 0
fields = ('description',)
class UniversityAdmin(admin.ModelAdmin):
inlines = [DepartmentInline]
search_fields = ('description',)
def save_model(self,request,obj,form,change):
obj.code = obj.description.replace(' ','_')
obj.save()
def change_view(self,request,object_id,extra_content=None):
self.exclude = ('',)
return super(UniversityAdmin,self).change_view(request,object_id)
def add_view(self,request,extra_content=None):
self.exclude = ('code',)
return super(UniversityAdmin,self).add_view(request)
class CourseForm(forms.ModelForm):
class Meta:
		model = Course
def __init__(self,*args,**kwargs):
super(CourseForm,self).__init__(*args,**kwargs)
self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact=self.instance.id)
def clean(self):
#Need to handle validation for unique_together
cleaned_data = self.cleaned_data
if self.instance.pk is None:
if Course.objects.filter(code=cleaned_data['code'],university=cleaned_data['university']).exists():
raise forms.ValidationError('The course already exists at this university.')
return cleaned_data
class CourseAdmin(admin.ModelAdmin):
form = CourseForm
list_display = ('code','university',)
list_filter = ('university',)
search_fields = ('code',)
def save_model(self,request,obj,form,change):
if obj.code == '':
obj.code = obj.name.replace(' ','_')
obj.save()
class dbAdmin(UserAdmin):
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal info'), {'fields': ('first_name', 'last_name')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')}
),
)
form = CustomUserChangeForm
add_form = CustomUserCreationForm
list_display = ('email', 'first_name', 'last_name', 'is_staff')
search_fields = ('email', 'first_name', 'last_name')
ordering = ('email',)
admin.site.register(dbUser, dbAdmin)
admin.site.register(University,UniversityAdmin)
admin.site.register(Program,ProgramAdmin)
admin.site.register(Department,DepartmentAdmin)
admin.site.register(Course,CourseAdmin)
|
normal
|
{
"blob_id": "77e4bbe625251254cdadaeeb23dddf51e729e747",
"index": 832,
"step-1": "<mask token>\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n def get_university(self, obj):\n return obj.department.university\n <mask token>\n <mask token>\n <mask token>\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n <mask token>\n <mask token>\n <mask token>\n\n def get_university(self, obj):\n return obj.department.university\n <mask token>\n <mask token>\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ProgramAdmin(admin.ModelAdmin):\n list_display = 'description', 'get_university'\n search_fields = 'description', 'department__university__code'\n list_filter = 'department__university',\n\n def get_university(self, obj):\n return obj.department.university\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n get_university.short_description = 'University'\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(ProgramAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(ProgramAdmin, self).add_view(request)\n\n\nclass ProgramInline(admin.TabularInline):\n model = Program\n extra = 0\n fields = 'description',\n\n\nclass DepartmentAdmin(admin.ModelAdmin):\n fieldsets = [(None, {'fields': ['description', 'university', 'tenured',\n 'nonTenured']})]\n inlines = [ProgramInline]\n search_fields = 'university__description', 'description'\n list_filter = 'description', 'university'\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n model = Department\n extra = 0\n fields = 'description',\n\n\nclass UniversityAdmin(admin.ModelAdmin):\n inlines = [DepartmentInline]\n search_fields = 'description',\n\n def save_model(self, request, obj, form, change):\n obj.code = obj.description.replace(' ', '_')\n obj.save()\n\n def change_view(self, request, object_id, extra_content=None):\n self.exclude = '',\n return super(UniversityAdmin, self).change_view(request, object_id)\n\n def add_view(self, request, extra_content=None):\n self.exclude = 'code',\n return super(UniversityAdmin, self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\n class Meta:\n Model = Course\n\n def __init__(self, *args, **kwargs):\n super(CourseForm, self).__init__(*args, **kwargs)\n self.fields['prerequisite'].queryset = Course.objects.exclude(id__exact\n =self.instance.id)\n\n def clean(self):\n cleaned_data = self.cleaned_data\n if self.instance.pk is None:\n if Course.objects.filter(code=cleaned_data['code'], university=\n cleaned_data['university']).exists():\n raise forms.ValidationError(\n 'The course already exists at this university.')\n return cleaned_data\n\n\nclass CourseAdmin(admin.ModelAdmin):\n form = CourseForm\n list_display = 'code', 'university'\n list_filter = 'university',\n search_fields = 'code',\n\n def save_model(self, request, obj, form, change):\n if obj.code == '':\n obj.code = obj.name.replace(' ', '_')\n obj.save()\n\n\nclass dbAdmin(UserAdmin):\n fieldsets = (None, {'fields': ('email', 'password')}), (_(\n 'Personal info'), {'fields': ('first_name', 'last_name')}), (_(\n 'Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n 'groups', 'user_permissions')}), (_('Important dates'), {'fields':\n ('last_login', 'date_joined')})\n add_fieldsets = (None, {'classes': ('wide',), 'fields': ('email',\n 'password1', 'password2')}),\n form = CustomUserChangeForm\n add_form = CustomUserCreationForm\n list_display = 'email', 'first_name', 'last_name', 'is_staff'\n search_fields = 'email', 'first_name', 'last_name'\n ordering = 'email',\n\n\nadmin.site.register(dbUser, dbAdmin)\nadmin.site.register(University, UniversityAdmin)\nadmin.site.register(Program, ProgramAdmin)\nadmin.site.register(Department, 
DepartmentAdmin)\nadmin.site.register(Course, CourseAdmin)\n",
"step-5": "from django.contrib import admin\nfrom django.contrib.auth.admin import UserAdmin\nfrom django.utils.translation import ugettext_lazy as _\nfrom django import forms\nfrom programs.models import *\nfrom programs.forms import CustomUserCreationForm, CustomUserChangeForm\nimport pdb\n\nclass ProgramAdmin(admin.ModelAdmin):\n\n\tlist_display = ('description','get_university')\n\tsearch_fields=('description','department__university__code')\n\tlist_filter = ('department__university',)\n\n\tdef get_university(self,obj):\n\t\treturn obj.department.university\n\n\tdef save_model(self,request,obj,form,change):\n\t\tobj.code = obj.description.replace(' ','_')\n\t\tobj.save()\n\n\tget_university.short_description = 'University'\n\n\tdef change_view(self,request,object_id,extra_content=None):\n\t\tself.exclude = ('',)\n\t\treturn super(ProgramAdmin,self).change_view(request,object_id)\n\n\tdef add_view(self,request,extra_content=None):\n\n\t\tself.exclude = ('code',)\n\t\treturn super(ProgramAdmin,self).add_view(request)\n\nclass ProgramInline(admin.TabularInline):\n\tmodel = Program\n\textra = 0\n\tfields = ('description',)\n\nclass DepartmentAdmin(admin.ModelAdmin):\n\n\tfieldsets = [\n\t(None, {'fields':['description','university','tenured','nonTenured']}),\n\t]\n\tinlines = [ProgramInline]\n\n\tsearch_fields = ('university__description','description')\n\tlist_filter = ('description','university')\n\n\tdef save_model(self,request,obj,form,change):\n\t\tif obj.code == '':\n\t\t\tobj.code = obj.name.replace(' ','_')\n\t\tobj.save()\n\n\nclass DepartmentInline(admin.TabularInline):\n\tmodel = Department\n\textra = 0\n\tfields = ('description',)\n\nclass UniversityAdmin(admin.ModelAdmin):\n\n\tinlines = [DepartmentInline]\n\n\tsearch_fields = ('description',)\n\n\tdef save_model(self,request,obj,form,change):\n\t\tobj.code = obj.description.replace(' ','_')\n\t\tobj.save()\n\n\tdef change_view(self,request,object_id,extra_content=None):\n\t\tself.exclude = ('',)\n\t\treturn super(UniversityAdmin,self).change_view(request,object_id)\n\n\tdef add_view(self,request,extra_content=None):\n\n\t\tself.exclude = ('code',)\n\t\treturn super(UniversityAdmin,self).add_view(request)\n\n\nclass CourseForm(forms.ModelForm):\n\n\tclass Meta:\n\t\tModel = Course\n\n\tdef __init__(self,*args,**kwargs):\n\t\tsuper(CourseForm,self).__init__(*args,**kwargs)\n\t\tself.fields['prerequisite'].queryset = Course.objects.exclude(id__exact=self.instance.id)\n\n\tdef clean(self):\n\t\t#Need to handle validation for unique_together\n\n\t\tcleaned_data = self.cleaned_data\n\t\tif self.instance.pk is None:\n\t\t\tif Course.objects.filter(code=cleaned_data['code'],university=cleaned_data['university']).exists():\n\t\t\t\traise forms.ValidationError('The course already exists at this university.')\n\n\t\treturn cleaned_data\n\nclass CourseAdmin(admin.ModelAdmin):\n\tform = CourseForm\n\n\tlist_display = ('code','university',)\n\tlist_filter = ('university',)\n\tsearch_fields = ('code',)\n\n\tdef save_model(self,request,obj,form,change):\n\t\tif obj.code == '':\n\t\t\tobj.code = obj.name.replace(' ','_')\n\n\t\tobj.save()\n\n\nclass dbAdmin(UserAdmin):\n\tfieldsets = (\n\t\t(None, {'fields': ('email', 'password')}),\n\t\t(_('Personal info'), {'fields': ('first_name', 'last_name')}),\n\t\t(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',\n\t\t\t'groups', 'user_permissions')}),\n\t\t(_('Important dates'), {'fields': ('last_login', 'date_joined')}),\n\t\t)\n\n\tadd_fieldsets = (\n\t\t(None, 
{\n\t\t\t'classes': ('wide',),\n\t\t\t'fields': ('email', 'password1', 'password2')}\n\t\t\t),\n\t\t)\n\tform = CustomUserChangeForm\n\tadd_form = CustomUserCreationForm\n\tlist_display = ('email', 'first_name', 'last_name', 'is_staff')\n\tsearch_fields = ('email', 'first_name', 'last_name')\n\tordering = ('email',)\n\nadmin.site.register(dbUser, dbAdmin)\nadmin.site.register(University,UniversityAdmin)\nadmin.site.register(Program,ProgramAdmin)\nadmin.site.register(Department,DepartmentAdmin)\nadmin.site.register(Course,CourseAdmin)\n\n",
"step-ids": [
17,
23,
24,
27,
29
]
}
|
[
17,
23,
24,
27,
29
] |
# Stubs for torch.nn.utils (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_
from .convert_parameters import parameters_to_vector, vector_to_parameters
from .spectral_norm import remove_spectral_norm, spectral_norm
from .weight_norm import remove_weight_norm, weight_norm
|
normal
|
{
"blob_id": "5d9ace3b6c5b4e24fc3b20b5e5640f2fcdb252bb",
"index": 9292,
"step-1": "<mask token>\n",
"step-2": "from .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_\nfrom .convert_parameters import parameters_to_vector, vector_to_parameters\nfrom .spectral_norm import remove_spectral_norm, spectral_norm\nfrom .weight_norm import remove_weight_norm, weight_norm\n",
"step-3": "# Stubs for torch.nn.utils (Python 3)\n#\n# NOTE: This dynamically typed stub was automatically generated by stubgen.\n\nfrom .clip_grad import clip_grad_norm, clip_grad_norm_, clip_grad_value_\nfrom .convert_parameters import parameters_to_vector, vector_to_parameters\nfrom .spectral_norm import remove_spectral_norm, spectral_norm\nfrom .weight_norm import remove_weight_norm, weight_norm\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# coding=utf-8
"""
author: wlc
function: encyclopedia search data access layer
"""
# External libraries
import json
import re
from bs4 import BeautifulSoup
# Internal libraries
from src.util.reptile import *
class EncyclopediaDao:
@staticmethod
def get_key_content (key: str) -> list:
"""
		Get the encyclopedia search result contents for the given keyword
:param key:
:return:
"""
		# 1. Set up the request parameters
url = 'https://zh.wikipedia.org/w/api.php?'
parm = {
'action': 'query',
'list': 'search',
'srsearch': key,
'format': 'json',
'formatversion': '2'
}
		# 2. Fetch the encyclopedia content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)
content_list = json.loads(page_content)['query']['search']
		# 3. Format the encyclopedia content
data = []
prefix = 'https://zh.wikipedia.org/wiki/'
for index, item in enumerate(content_list):
date, time = item['timestamp'].rstrip('Z').split('T')
entry = {
'id': item['pageid'],
'index': index,
'create_date': date,
'create_time': time,
'title': item['title'],
'abstract': re.sub('[<span class=\"searchmatch\">,</span>]', '', item['snippet']),
'url': prefix + item['title'],
}
data.append(entry)
return data
@staticmethod
def get_key_title(key: str) -> list:
"""
		Get the encyclopedia search result titles for the given keyword
:param key:
:return:
"""
		# 1. Set up the request parameters
url = 'https://zh.wikipedia.org/w/api.php?'
parm = {
'action': 'opensearch',
'search': key,
'format': 'json',
'formatversion': '2'
}
		# 2. Fetch the encyclopedia content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)
content_list = json.loads(page_content)[1]
		# 3. Format the encyclopedia content
data = []
prefix = 'https://zh.wikipedia.org/wiki/'
for index, item in enumerate(content_list):
entry = {
'index': index,
'title': item,
'url': prefix + item,
}
data.append(entry)
return data
@staticmethod
def get_faq_content(query: str, page: str) -> list:
"""
		Get the FAQ search result contents for the given query
:param query:
:param page:
:return:
"""
		# 1. Set up the request parameters
url = 'https://zhidao.baidu.com/search?'
parm = {
'lm': '0',
'rn': '5',
'pn': page,
'fr': 'search',
'ie': 'gbk',
'word': query
}
		# 2. Fetch the FAQ content
reptile = Reptile()
page_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3, is_cookie=True, charset='gbk')
bs = BeautifulSoup(page_content, "html.parser")
content_list = bs.body.find_all("dl", {'class': 'dl'})
		# 3. Format the FAQ content
data = []
for item in content_list:
entry = {
'create_date': item.find("dd", {'class': 'dd explain f-light'}).span.text,
'title': item.a.text,
'abstract': item.find("dd", {'class': 'dd answer'}).text,
'url': item.a.get('href')
}
data.append(entry)
return data
|
normal
|
{
"blob_id": "a7f348b258e1d6b02a79c60e4fe54b6d53801f70",
"index": 3877,
"step-1": "<mask token>\n\n\nclass EncyclopediaDao:\n <mask token>\n <mask token>\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-2": "<mask token>\n\n\nclass EncyclopediaDao:\n <mask token>\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-3": "<mask token>\n\n\nclass EncyclopediaDao:\n\n @staticmethod\n def get_key_content(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'query', 'list': 'search', 'srsearch': key,\n 'format': 'json', 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)['query']['search']\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n date, time = item['timestamp'].rstrip('Z').split('T')\n entry = {'id': item['pageid'], 'index': index, 'create_date':\n date, 'create_time': time, 'title': item['title'],\n 'abstract': re.sub('[<span class=\"searchmatch\">,</span>]',\n '', item['snippet']), 'url': prefix + item['title']}\n data.append(entry)\n return data\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-4": "<mask token>\nimport json\nimport re\nfrom bs4 import BeautifulSoup\nfrom src.util.reptile import *\n\n\nclass EncyclopediaDao:\n\n @staticmethod\n def get_key_content(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'query', 'list': 'search', 'srsearch': key,\n 'format': 'json', 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)['query']['search']\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n date, time = item['timestamp'].rstrip('Z').split('T')\n entry = {'id': item['pageid'], 'index': index, 'create_date':\n date, 'create_time': time, 'title': item['title'],\n 'abstract': re.sub('[<span class=\"searchmatch\">,</span>]',\n '', item['snippet']), 'url': prefix + item['title']}\n data.append(entry)\n return data\n\n @staticmethod\n def get_key_title(key: str) ->list:\n \"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zh.wikipedia.org/w/api.php?'\n parm = {'action': 'opensearch', 'search': key, 'format': 'json',\n 'formatversion': '2'}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3)\n content_list = json.loads(page_content)[1]\n data = []\n prefix = 'https://zh.wikipedia.org/wiki/'\n for index, item in enumerate(content_list):\n entry = {'index': index, 'title': item, 'url': prefix + item}\n data.append(entry)\n return data\n\n @staticmethod\n def get_faq_content(query: str, page: str) ->list:\n \"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n url = 'https://zhidao.baidu.com/search?'\n parm = {'lm': '0', 'rn': '5', 'pn': page, 'fr': 'search', 'ie':\n 'gbk', 'word': query}\n reptile = Reptile()\n page_content = reptile.get_page_content(url + '&'.join([(key + '=' +\n parm[key]) for key in parm]), timeout=3, is_cookie=True,\n charset='gbk')\n bs = BeautifulSoup(page_content, 'html.parser')\n content_list = bs.body.find_all('dl', {'class': 'dl'})\n data = []\n for item in content_list:\n entry = {'create_date': item.find('dd', {'class':\n 'dd explain f-light'}).span.text, 'title': item.a.text,\n 'abstract': item.find('dd', {'class': 'dd answer'}).text,\n 'url': item.a.get('href')}\n data.append(entry)\n return data\n",
"step-5": "# coding=utf-8\n\n\"\"\"\nauthor: wlc\nfunction: 百科检索数据层\n\"\"\"\n\n# 引入外部库\nimport json\nimport re\nfrom bs4 import BeautifulSoup\n\n# 引入内部库\nfrom src.util.reptile import *\n\n\nclass EncyclopediaDao:\n\t@staticmethod\n\tdef get_key_content (key: str) -> list:\n\t\t\"\"\"\n\t\t获取指定关键字的百科内容检索内容\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zh.wikipedia.org/w/api.php?'\n\t\tparm = {\n\t\t\t'action': 'query',\n\t\t\t'list': 'search',\n\t\t\t'srsearch': key,\n\t\t\t'format': 'json',\n\t\t\t'formatversion': '2'\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)\n\t\tcontent_list = json.loads(page_content)['query']['search']\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tprefix = 'https://zh.wikipedia.org/wiki/'\n\t\tfor index, item in enumerate(content_list):\n\t\t\tdate, time = item['timestamp'].rstrip('Z').split('T')\n\t\t\tentry = {\n\t\t\t\t'id': item['pageid'],\n\t\t\t\t'index': index,\n\t\t\t\t'create_date': date,\n\t\t\t\t'create_time': time,\n\t\t\t\t'title': item['title'],\n\t\t\t\t'abstract': re.sub('[<span class=\\\"searchmatch\\\">,</span>]', '', item['snippet']),\n\t\t\t\t'url': prefix + item['title'],\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n\n\t@staticmethod\n\tdef get_key_title(key: str) -> list:\n\t\t\"\"\"\n\t\t获取指定关键字的百科内容检索标题\n\t\t:param key:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zh.wikipedia.org/w/api.php?'\n\t\tparm = {\n\t\t\t'action': 'opensearch',\n\t\t\t'search': key,\n\t\t\t'format': 'json',\n\t\t\t'formatversion': '2'\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3)\n\t\tcontent_list = json.loads(page_content)[1]\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tprefix = 'https://zh.wikipedia.org/wiki/'\n\t\tfor index, item in enumerate(content_list):\n\t\t\tentry = {\n\t\t\t\t'index': index,\n\t\t\t\t'title': item,\n\t\t\t\t'url': prefix + item,\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n\n\t@staticmethod\n\tdef get_faq_content(query: str, page: str) -> list:\n\t\t\"\"\"\n\t\t获取指定query的faq检索内容\n\t\t:param query:\n\t\t:param page:\n\t\t:return:\n\t\t\"\"\"\n\t\t# 1.参数设置\n\t\turl = 'https://zhidao.baidu.com/search?'\n\t\tparm = {\n\t\t\t'lm': '0',\n\t\t\t'rn': '5',\n\t\t\t'pn': page,\n\t\t\t'fr': 'search',\n\t\t\t'ie': 'gbk',\n\t\t\t'word': query\n\t\t}\n\n\t\t# 2.百科内容获取\n\t\treptile = Reptile()\n\t\tpage_content = reptile.get_page_content(url + '&'.join([key + '=' + parm[key] for key in parm]), timeout=3, is_cookie=True, charset='gbk')\n\t\tbs = BeautifulSoup(page_content, \"html.parser\")\n\t\tcontent_list = bs.body.find_all(\"dl\", {'class': 'dl'})\n\n\t\t# 3.百科内容格式化\n\t\tdata = []\n\t\tfor item in content_list:\n\t\t\tentry = {\n\t\t\t\t'create_date': item.find(\"dd\", {'class': 'dd explain f-light'}).span.text,\n\t\t\t\t'title': item.a.text,\n\t\t\t\t'abstract': item.find(\"dd\", {'class': 'dd answer'}).text,\n\t\t\t\t'url': item.a.get('href')\n\t\t\t}\n\t\t\tdata.append(entry)\n\n\t\treturn data\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
from python_logging.Demo_CustomLogger import CustomLogger
CustomLogger.init_log()
# CustomLogger.info()
log_str = '%s/%s/%s\n' % ("demo1", "demo2", "demo3")
CustomLogger.info('[main]', log_str)
|
normal
|
{
"blob_id": "ed5653455062cb3468c232cf0fa3f1d18793626a",
"index": 591,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nCustomLogger.init_log()\n<mask token>\nCustomLogger.info('[main]', log_str)\n",
"step-3": "<mask token>\nCustomLogger.init_log()\nlog_str = '%s/%s/%s\\n' % ('demo1', 'demo2', 'demo3')\nCustomLogger.info('[main]', log_str)\n",
"step-4": "from python_logging.Demo_CustomLogger import CustomLogger\nCustomLogger.init_log()\nlog_str = '%s/%s/%s\\n' % ('demo1', 'demo2', 'demo3')\nCustomLogger.info('[main]', log_str)\n",
"step-5": "from python_logging.Demo_CustomLogger import CustomLogger\n\nCustomLogger.init_log()\n# CustomLogger.info()\nlog_str = '%s/%s/%s\\n' % (\"demo1\", \"demo2\", \"demo3\")\nCustomLogger.info('[main]', log_str)\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# 8-7. Album: Write a function called make_album() that builds a dictionary
# describing a music album. The function should take in an artist name and an
# album title, and it should return a dictionary containing these two pieces
# of information. Use the function to make three dictionaries representing
# different albums. Print each return value to show that the dictionaries are
# storing the album information correctly. Use None to add an optional
# parameter to make_album() that allows you to store the number of songs on an
# album. If the calling line includes a value for the number of songs, add
# that value to the album’s dictionary. Make at least one new function call
# that includes the number of songs on an album.
# PART ONE
def make_album(artist_name, album_title):
"""Build a dictionary describing a music album"""
music_album = {
'Artist': artist_name.title(),
'Album': album_title.title()
}
return music_album
print("Here's Part One:")
cardi = make_album('cardi b', 'invasion of privacy')
print(cardi)
jhene = make_album('jhene aiko', 'souled out')
print(jhene)
lennon = make_album('lennon stella', 'three. two. one.')
print(lennon)
# PART TWO
def make_album_two(artist_name, album_title, number_of_songs=None):
"""Build a dictionary describing a music album"""
music_album = {'Artist': artist_name.title(),
'Album': album_title.title()}
if number_of_songs:
music_album['Number of Songs'] = number_of_songs
return music_album
print("\nHere's Part Two:")
cardi = make_album_two('cardi b', 'invasion of privacy')
print(cardi)
jhene = make_album_two('jhene aiko', 'souled out')
print(jhene)
lennon = make_album_two('lennon stella', 'three. two. one.', 13)
print(lennon)
|
normal
|
{
"blob_id": "19888c998e8787533e84413272da1183f16fcdb1",
"index": 2974,
"step-1": "<mask token>\n\n\ndef make_album_two(artist_name, album_title, number_of_songs=None):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n if number_of_songs:\n music_album['Number of Songs'] = number_of_songs\n return music_album\n\n\n<mask token>\n",
"step-2": "def make_album(artist_name, album_title):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n return music_album\n\n\n<mask token>\n\n\ndef make_album_two(artist_name, album_title, number_of_songs=None):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n if number_of_songs:\n music_album['Number of Songs'] = number_of_songs\n return music_album\n\n\n<mask token>\n",
"step-3": "def make_album(artist_name, album_title):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n return music_album\n\n\nprint(\"Here's Part One:\")\n<mask token>\nprint(cardi)\n<mask token>\nprint(jhene)\n<mask token>\nprint(lennon)\n\n\ndef make_album_two(artist_name, album_title, number_of_songs=None):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n if number_of_songs:\n music_album['Number of Songs'] = number_of_songs\n return music_album\n\n\nprint(\"\"\"\nHere's Part Two:\"\"\")\n<mask token>\nprint(cardi)\n<mask token>\nprint(jhene)\n<mask token>\nprint(lennon)\n",
"step-4": "def make_album(artist_name, album_title):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n return music_album\n\n\nprint(\"Here's Part One:\")\ncardi = make_album('cardi b', 'invasion of privacy')\nprint(cardi)\njhene = make_album('jhene aiko', 'souled out')\nprint(jhene)\nlennon = make_album('lennon stella', 'three. two. one.')\nprint(lennon)\n\n\ndef make_album_two(artist_name, album_title, number_of_songs=None):\n \"\"\"Build a dictionary describing a music album\"\"\"\n music_album = {'Artist': artist_name.title(), 'Album': album_title.title()}\n if number_of_songs:\n music_album['Number of Songs'] = number_of_songs\n return music_album\n\n\nprint(\"\"\"\nHere's Part Two:\"\"\")\ncardi = make_album_two('cardi b', 'invasion of privacy')\nprint(cardi)\njhene = make_album_two('jhene aiko', 'souled out')\nprint(jhene)\nlennon = make_album_two('lennon stella', 'three. two. one.', 13)\nprint(lennon)\n",
"step-5": "# 8-7. Album: Write a function called make_album() that builds a dictionary\n# describing a music album. The function should take in an artist name and an\n# album title, and it should return a dictionary containing these two pieces\n# of information. Use the function to make three dictionaries representing\n# different albums. Print each return value to show that the dictionaries are\n# storing the album information correctly. Use None to add an optional\n# parameter to make_album() that allows you to store the number of songs on an\n# album. If the calling line includes a value for the number of songs, add\n# that value to the album’s dictionary. Make at least one new function call\n# that includes the number of songs on an album.\n\n# PART ONE\n\ndef make_album(artist_name, album_title): \n \"\"\"Build a dictionary describing a music album\"\"\" \n music_album = {\n 'Artist': artist_name.title(),\n 'Album': album_title.title()\n }\n return music_album\n\nprint(\"Here's Part One:\")\ncardi = make_album('cardi b', 'invasion of privacy')\nprint(cardi)\n\njhene = make_album('jhene aiko', 'souled out')\nprint(jhene)\n\nlennon = make_album('lennon stella', 'three. two. one.')\nprint(lennon)\n\n# PART TWO\ndef make_album_two(artist_name, album_title, number_of_songs= None): \n \"\"\"Build a dictionary describing a music album\"\"\" \n music_album = {'Artist': artist_name.title(),\n 'Album': album_title.title()}\n if number_of_songs:\n music_album['Number of Songs'] = number_of_songs\n return music_album\n\nprint(\"\\nHere's Part Two:\")\ncardi = make_album_two('cardi b', 'invasion of privacy')\nprint(cardi)\n\njhene = make_album_two('jhene aiko', 'souled out')\nprint(jhene)\n\nlennon = make_album_two('lennon stella', 'three. two. one.', 13)\nprint(lennon)\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
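
# Note: word_tokenize and stopwords rely on NLTK data packages; on a fresh setup
# they typically need a one-time nltk.download('punkt') and nltk.download('stopwords').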
#Print Stop words
stop_words = set(stopwords.words("english"))
print(stop_words)
example_text = "This is general sentence to just clarify if stop words are working or not. I have some awesome projects coming up"
words = word_tokenize(example_text)
filtered_sentence = []
for w in words:
    if w not in stop_words:
filtered_sentence.append(w)
#print filtered sentences
print(filtered_sentence)
#print in a line
filtered_sentence1 = [w for w in words if not w in stop_words]
#print filtered sentences
print(filtered_sentence1)
|
normal
|
{
"blob_id": "90f5629ac48edfccea57243ffb6188a98123367d",
"index": 5197,
"step-1": "from nltk.corpus import stopwords\r\nfrom nltk.tokenize import word_tokenize\r\n\r\n#Print Stop words\r\nstop_words = set(stopwords.words(\"english\"))\r\nprint(stop_words)\r\n\r\nexample_text = \"This is general sentence to just clarify if stop words are working or not. I have some awesome projects coming up\"\r\n\r\nwords = word_tokenize(example_text)\r\n\r\nfiltered_sentence = []\r\nfor w in words:\r\n for w not in stop_words:\r\n filtered_sentence.append(w)\r\n\r\n#print filtered sentences\r\nprint(filtered_sentence)\r\n\r\n#print in a line\r\nfiltered_sentence1 = [w for w in words if not w in stop_words]\r\n\r\n#print filtered sentences\r\nprint(filtered_sentence1)\r\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Time : 2021/05/08 20:06
# @Author : Yi
# @FileName: show_slices.py
import os
import pydicom
import glob
import shutil
import random
import numpy as np
import cv2
import skimage.io as io
from data_Parameter import parse_args
import matplotlib.pyplot as plt
def dir_create(path):
    """Create a fresh folder, removing any existing non-empty one first.
    :param path: folder path
:return:
"""
if (os.path.exists(path)) and (os.listdir(path) != []):
shutil.rmtree(path)
os.makedirs(path)
if not os.path.exists(path):
os.makedirs(path)
def read_dicom(path):
    """Read all slices of one case and assemble them into a 720*720*720 numpy array.
    :param path: path to one case's dcm files
:return:
"""
print(os.path.basename(path))
pi = os.path.basename(path).split("_")[1]
dcm_size = len(glob.glob(path + "/*.dcm"))
dcms = [
path + "/E" + pi + "S101I%d.dcm" % dicom_slicei
for dicom_slicei in range(1, dcm_size + 1)
]
length = int(len(dcms))
print(length)
dcm_f = pydicom.read_file(dcms[0]).pixel_array
dcm_size = max(max(dcm_f.shape), 720)
# print(dcm_f.shape)
dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
for dcmi in range(len(dcms)):
cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
cdcm -= np.mean(cdcm)
cdcm /= np.std(cdcm)
dcm_img[
dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,
dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,
dcmi,
] = cdcm
return dcm_img
def show_image(input_dir):
    """Randomly display some slice images from one case.
:param input_dir:
:return:
"""
# special cases: "P556", "P576", "P887",160*640*640
for casei in os.listdir(input_dir)[5:6]:
pi = casei.split("_")[1]
dcm_img = read_dicom(input_dir + "/" + casei)
print("Dcm shape: ", dcm_img.shape)
# choices = random.sample(list(np.arange(0, 720, 1)), 10)
# choices.append(316)
choices = range(330,350)
for i in choices:
fig = plt.figure(num=i, figsize=(10, 10))
ax = fig.add_subplot(111)
img=ax.imshow(dcm_img[:, :, i], cmap='gray')
ax.set_title(pi + '_' + str(i))
plt.colorbar(img)
plt.show()
def show_image_avail(input_dir):
    """Randomly display some annotated case images for one location.
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 15)
for file in choices:
image_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1=ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file))
plt.colorbar(img1)
plt.show()
def show_mask(input_dir):
    """Randomly display annotated masks (2 channels) for one location.
:param input_dir:
:return:
"""
index = 0
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(num=index, figsize=(10, 5))
ax1 = fig.add_subplot(211)
ax1.imshow(mask_numpy[:, :, 0], cmap='gray')
ax1.set_title(str(file) + '_outer')
ax2 = fig.add_subplot(212)
ax2.imshow(mask_numpy[:, :, 1], cmap='gray')
ax2.set_title(str(file) + '_luman')
plt.show()
index += 1
def show_mask_circle(input_dir):
    """Randomly display annotated ring masks for one location.
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1=ax1.imshow(mask_numpy[:, :], cmap='gray')
ax1.set_title(str(file) + '_circle')
plt.colorbar(img1)
plt.show()
def show_image_mask(image_path, mask_path):
    """Randomly display case images and their annotations for one location.
:param image_path:
:param mask_path:
:return:
"""
files_choice=random.sample(os.listdir(image_path),10)
for file_name in files_choice:
image_numpy=np.load(image_path+'/'+file_name)
mask_numpy =np.load(mask_path+'/'+file_name)
fig =plt.figure(figsize=(10,5))
ax1 =fig.add_subplot(211)
img1=ax1.imshow(image_numpy,cmap='gray')
ax1.set_title(str(file_name))
plt.colorbar(img1)
ax2=fig.add_subplot(212)
img2=ax2.imshow(mask_numpy,cmap='gray')
# ax2.set_title(str(file_name))
plt.colorbar(img2)
plt.show()
def main(args):
image_input_dir = args.datasets_path
# image_avail_dir = args.image_save_sep_position + '/ICAR/positive'
# image_avail_dir = args.image_save_sep_position + '/ICAR/negative'
# circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'
circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
    # show_image(image_input_dir)  # randomly display some case images
# show_image_avail(image_avail_dir)
show_mask_circle(circle_mask_dir)
# show_image_mask(image_avail_dir,circle_mask_dir)
if __name__ == '__main__':
args = parse_args()
main(args)
|
normal
|
{
"blob_id": "4905b820f33619a80a9915d0603bc39e0d0368d9",
"index": 6175,
"step-1": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\n<mask token>\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = 
ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = 
ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n",
"step-4": "import os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = 
plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n",
"step-5": "# !/usr/bin/env python3\n# -*- coding:utf-8 -*-\n\n# @Time : 2021/05/08 20:06\n# @Author : Yi\n# @FileName: show_slices.py\n\nimport os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\n\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if (os.path.exists(path)) and (os.listdir(path) != []):\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n\n pi = os.path.basename(path).split(\"_\")[1]\n dcm_size = len(glob.glob(path + \"/*.dcm\"))\n dcms = [\n path + \"/E\" + pi + \"S101I%d.dcm\" % dicom_slicei\n for dicom_slicei in range(1, dcm_size + 1)\n ]\n\n length = int(len(dcms))\n print(length)\n\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n # print(dcm_f.shape)\n\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n\n dcm_img[\n dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,\n dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,\n dcmi,\n ] = cdcm\n\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n # special cases: \"P556\", \"P576\", \"P887\",160*640*640\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split(\"_\")[1]\n dcm_img = read_dicom(input_dir + \"/\" + casei)\n print(\"Dcm shape: \", dcm_img.shape)\n\n # choices = random.sample(list(np.arange(0, 720, 1)), 10)\n # choices.append(316)\n\n choices = range(330,350)\n\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img=ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n\n 
plt.show()\n\n\ndef show_image_mask(image_path,mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n\n files_choice=random.sample(os.listdir(image_path),10)\n\n for file_name in files_choice:\n image_numpy=np.load(image_path+'/'+file_name)\n mask_numpy =np.load(mask_path+'/'+file_name)\n\n fig =plt.figure(figsize=(10,5))\n ax1 =fig.add_subplot(211)\n img1=ax1.imshow(image_numpy,cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n\n ax2=fig.add_subplot(212)\n img2=ax2.imshow(mask_numpy,cmap='gray')\n # ax2.set_title(str(file_name))\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n\n # image_avail_dir = args.image_save_sep_position + '/ICAR/positive'\n # image_avail_dir = args.image_save_sep_position + '/ICAR/negative'\n\n # circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n\n # show_image(image_input_dir) # 随机展示一些病例图像。\n # show_image_avail(image_avail_dir)\n show_mask_circle(circle_mask_dir)\n\n # show_image_mask(image_avail_dir,circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-23 17:10
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('sepomex', '0006_auto_20151113_2154'),
]
operations = [
migrations.CreateModel(
name='MXCiudad',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=200)),
('mx_estado', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ciudades', to='sepomex.MXEstado')),
],
),
migrations.AddField(
model_name='mxasentamiento',
name='mx_ciudad',
field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='ciudad', to='sepomex.MXCiudad'),
preserve_default=False,
),
]
|
normal
|
{
"blob_id": "99c27d13349eba391866cfed25cc052b40910ea5",
"index": 2837,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sepomex', '0006_auto_20151113_2154')]\n operations = [migrations.CreateModel(name='MXCiudad', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('nombre', models.CharField(max_length=\n 200)), ('mx_estado', models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, related_name='ciudades', to='sepomex.MXEstado'))]\n ), migrations.AddField(model_name='mxasentamiento', name=\n 'mx_ciudad', field=models.ForeignKey(default='', on_delete=django.\n db.models.deletion.CASCADE, related_name='ciudad', to=\n 'sepomex.MXCiudad'), preserve_default=False)]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('sepomex', '0006_auto_20151113_2154')]\n operations = [migrations.CreateModel(name='MXCiudad', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('nombre', models.CharField(max_length=\n 200)), ('mx_estado', models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, related_name='ciudades', to='sepomex.MXEstado'))]\n ), migrations.AddField(model_name='mxasentamiento', name=\n 'mx_ciudad', field=models.ForeignKey(default='', on_delete=django.\n db.models.deletion.CASCADE, related_name='ciudad', to=\n 'sepomex.MXCiudad'), preserve_default=False)]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.2 on 2017-06-23 17:10\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('sepomex', '0006_auto_20151113_2154'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='MXCiudad',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('nombre', models.CharField(max_length=200)),\n ('mx_estado', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ciudades', to='sepomex.MXEstado')),\n ],\n ),\n migrations.AddField(\n model_name='mxasentamiento',\n name='mx_ciudad',\n field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='ciudad', to='sepomex.MXCiudad'),\n preserve_default=False,\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import math
def upsample1(d, p):
    # ordinary barrier: add p to the radius
assert 1 <= p <= 10
return d + p
def upsample2(d, p):
    # doubling barrier: multiply the radius by p
assert 2 <= p <= 3
return d * p
def downsample(d, p):
    # gathering barrier: divide the radius by p, rounding up
assert 2 <= p <= 10
return math.ceil(d / p)
# initialize the lethal radius
lethal_radius = 1
# barrier parameters (z, p)
config = [(1, 6),
(2, 3),
(3, 3),
(2, 3),
(2, 3),
(3, 7)]
for i in range(int(input())):
z, p = list(map(int, input().strip().split()))
if z == 1:
lethal_radius = upsample1(lethal_radius, p)
if z == 2:
lethal_radius = upsample2(lethal_radius, p)
if z == 3:
lethal_radius = downsample(lethal_radius, p)
print(lethal_radius)
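
# Worked example (added for reference; the program itself reads the operations from
# stdin): applying the six (z, p) pairs in `config` above by hand gives
# 1 -> 1+6=7 -> 7*3=21 -> ceil(21/3)=7 -> 7*3=21 -> 21*3=63 -> ceil(63/7)=9,
# so that sequence of inputs would print 9.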
|
normal
|
{
"blob_id": "cb6f68c8b8a6cead1d9fcd25fa2a4e60f7a8fb28",
"index": 9746,
"step-1": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n<mask token>\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-3": "<mask token>\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-4": "import math\n\n\ndef upsample1(d, p):\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\nlethal_radius = 1\nconfig = [(1, 6), (2, 3), (3, 3), (2, 3), (2, 3), (3, 7)]\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n",
"step-5": "import math\n\n\ndef upsample1(d, p):\n # 普通结界\n assert 1 <= p <= 10\n return d + p\n\n\ndef upsample2(d, p):\n # 倍增结界\n assert 2 <= p <= 3\n return d * p\n\n\ndef downsample(d, p):\n # 聚集结界\n assert 2 <= p <= 10\n return math.ceil(d / p)\n\n\n# 初始化杀伤力范围\nlethal_radius = 1\n\n# 结界参数(z, p)\nconfig = [(1, 6),\n (2, 3),\n (3, 3),\n (2, 3),\n (2, 3),\n (3, 7)]\n\nfor i in range(int(input())):\n z, p = list(map(int, input().strip().split()))\n if z == 1:\n lethal_radius = upsample1(lethal_radius, p)\n if z == 2:\n lethal_radius = upsample2(lethal_radius, p)\n if z == 3:\n lethal_radius = downsample(lethal_radius, p)\nprint(lethal_radius)\n\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
'''
Remove duplicates from a sorted array:
Given a sorted array, remove the duplicates in place so that each element appears only once, and return the new length.
Do not use extra array space; you must modify the input array in place using O(1) extra memory.
Example 1:
Given nums = [1,1,2],
the function should return the new length 2, and the first two elements of nums become 1, 2.
You do not need to consider the elements beyond the returned length.
Example 2:
Given nums = [0,0,1,1,1,2,2,3,3,4],
the function should return the new length 5, and the first five elements of nums become 0, 1, 2, 3, 4.
You do not need to consider the elements beyond the returned length.
'''
def delete_sort_array(origin_list):
    if len(origin_list) <= 1:
        return len(origin_list)
    # Pop duplicates in place; only advance the index when the two neighbours
    # differ, so runs of three or more equal values are also collapsed.
    index = 0
    while index + 1 < len(origin_list):
        if origin_list[index] == origin_list[index + 1]:
            origin_list.pop(index + 1)
        else:
            index += 1
    return len(origin_list)
print(delete_sort_array([1,1,5,5,6,6,13,14]))
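
# A common O(n) alternative, shown only as a sketch: the classic two-pointer
# ("slow/fast") version overwrites duplicates instead of popping them, avoiding
# the linear cost of each list.pop from the middle of the list.
def delete_sort_array_two_pointer(nums):
    if not nums:
        return 0
    slow = 0  # index of the last unique element written so far
    for fast in range(1, len(nums)):
        if nums[fast] != nums[slow]:
            slow += 1
            nums[slow] = nums[fast]
    return slow + 1

print(delete_sort_array_two_pointer([0, 0, 1, 1, 1, 2, 2, 3, 3, 4]))  # 5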
|
normal
|
{
"blob_id": "ac0f0fbb9bcb450ac24198069ef8bea8b049ef47",
"index": 5824,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef delete_sort_array(origin_list):\n if len(origin_list) == 0:\n return 0\n elif len(origin_list) == 1:\n return 1\n else:\n for index, item in enumerate(origin_list[:]):\n if index + 1 < len(origin_list):\n if origin_list[index] == origin_list[index + 1]:\n origin_list.pop(index)\n return len(origin_list)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef delete_sort_array(origin_list):\n if len(origin_list) == 0:\n return 0\n elif len(origin_list) == 1:\n return 1\n else:\n for index, item in enumerate(origin_list[:]):\n if index + 1 < len(origin_list):\n if origin_list[index] == origin_list[index + 1]:\n origin_list.pop(index)\n return len(origin_list)\n\n\nprint(delete_sort_array([1, 1, 5, 5, 6, 6, 13, 14]))\n",
"step-4": "'''\n 删除排序数组中的重复项:\n\n给定一个排序数组,你需要在原地删除重复出现的元素,使得每个元素只出现一次,返回移除后数组的新长度。\n\n不要使用额外的数组空间,你必须在原地修改输入数组并在使用 O(1) 额外空间的条件下完成。\n\n示例 1:\n\n给定数组 nums = [1,1,2],\n\n函数应该返回新的长度 2, 并且原数组 nums 的前两个元素被修改为 1, 2。\n\n你不需要考虑数组中超出新长度后面的元素。\n示例 2:\n\n给定 nums = [0,0,1,1,1,2,2,3,3,4],\n\n函数应该返回新的长度 5, 并且原数组 nums 的前五个元素被修改为 0, 1, 2, 3, 4。\n\n你不需要考虑数组中超出新长度后面的元素。\n\n'''\n\ndef delete_sort_array(origin_list):\n if len(origin_list) == 0:\n return 0\n elif len(origin_list) == 1:\n return 1\n else:\n for index,item in enumerate(origin_list[:]):\n if index+1 < len(origin_list):\n if origin_list[index] == origin_list[index+1]:\n origin_list.pop(index)\n return len(origin_list)\nprint(delete_sort_array([1,1,5,5,6,6,13,14]))\n\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.forms import ModelForm
from django import forms
from models import *
from django.forms.widgets import *
class CommentForm(ModelForm):
# tags = TagField(widget=TagAutocomplete())
class Meta:
model=Comment
# fields = ('title', 'description', 'tags', 'enable_comments', 'owner')#, 'first_card' )
# widgets = {
# 'slug': HiddenInput,
# 'number_of_cards': HiddenInput,
# }
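		# Note (added): newer Django versions (1.8+) require a ModelForm Meta to declare
		# either `fields` or `exclude`, e.g. `fields = '__all__'` to keep the old behaviour.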
|
normal
|
{
"blob_id": "81535b43437f9bcb18973ceaa5c3340ad9bd4f0f",
"index": 4170,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass CommentForm(ModelForm):\n\n\n class Meta:\n model = Comment\n",
"step-3": "from django.forms import ModelForm\nfrom django import forms\nfrom models import *\nfrom django.forms.widgets import *\n\n\nclass CommentForm(ModelForm):\n\n\n class Meta:\n model = Comment\n",
"step-4": "from django.forms import ModelForm\nfrom django import forms\nfrom models import *\nfrom django.forms.widgets import *\n\nclass CommentForm(ModelForm):\n\t# tags = TagField(widget=TagAutocomplete())\n\tclass Meta:\n\t\tmodel=Comment\n\t\t# fields = ('title', 'description', 'tags', 'enable_comments', 'owner')#, 'first_card' )\n\t\t\n\t\t# widgets = {\n\t\t# \t'slug': HiddenInput,\n\t\t# \t'number_of_cards': HiddenInput,\n\t\t# \t}\n\t\t",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Give a string that represents a polynomial (Ex: "3x ^ 3 + 5x ^ 2 - 2x - 5") and
# a number (whole or float). Evaluate the polynomial for the given value.
#Horner method
def horner( poly, x):
result = poly[0]
for i in range(1 , len(poly)):
result = result*x + poly[i]
return result
# Let us evaluate the value of
# 3x^3 + 5x^2 - 2x - 5 for x = 3
poly = [3 , 5 , -2 , -5 ]
x = 3
print("Value of polynomial is " , horner(poly, x))
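
# The problem statement above gives the polynomial as a string, while horner() takes a
# coefficient list. A minimal parsing sketch (an addition, not part of the original; it
# assumes every term has an explicit integer coefficient and that powers appear in
# strictly decreasing order with none skipped):
def parse_poly(s):
    coeffs = []
    for term in s.replace('- ', '+ -').split('+'):
        coeff = term.strip().split('x')[0].strip()
        if coeff in ('', '+'):
            coeff = '1'
        elif coeff == '-':
            coeff = '-1'
        coeffs.append(int(coeff))
    return coeffs

print(parse_poly("3x ^ 3 + 5x ^ 2 - 2x - 5"))  # [3, 5, -2, -5]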
|
normal
|
{
"blob_id": "750565af03d945fbdc32e26347b28977b203e9dc",
"index": 4858,
"step-1": "<mask token>\n",
"step-2": "def horner(poly, x):\n result = poly[0]\n for i in range(1, len(poly)):\n result = result * x + poly[i]\n return result\n\n\n<mask token>\n",
"step-3": "def horner(poly, x):\n result = poly[0]\n for i in range(1, len(poly)):\n result = result * x + poly[i]\n return result\n\n\n<mask token>\nprint('Value of polynomial is ', horner(poly, x))\n",
"step-4": "def horner(poly, x):\n result = poly[0]\n for i in range(1, len(poly)):\n result = result * x + poly[i]\n return result\n\n\npoly = [3, 5, -2, -5]\nx = 3\nprint('Value of polynomial is ', horner(poly, x))\n",
"step-5": "# Give a string that represents a polynomial (Ex: \"3x ^ 3 + 5x ^ 2 - 2x - 5\") and\n# a number (whole or float). Evaluate the polynomial for the given value.\n#Horner method\n\ndef horner( poly, x):\n result = poly[0]\n for i in range(1 , len(poly)):\n result = result*x + poly[i]\n return result\n# Let us evaluate value of \n# 3x3 + 5x2 - 2x - 5 for x = 3 \npoly = [3 , 5 , -2 , -5 ] \nx = 3\n \nprint(\"Value of polynomial is \" , horner(poly, x)) ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import pickle
import collections
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from IPython import embed
from optimizers.utils_1 import Model_1, Architecture_1
from optimizers.utils import Model, Architecture
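# Note (added): the Model/Architecture imports above look unused here, but they are
# presumably needed so pickle can resolve those classes when loading the result files.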
colors={
'BOHB-PC-DARTS': 'darkorange',
'BOHB-DARTS': 'dodgerblue',
'BOHB-GDAS' : 'forestgreen',
'RE': 'crimson',
'RS': 'darkorchid',
'RL': 'sienna',
'TPE': 'deepskyblue',
'SMAC': 'violet',
'HB': 'darkgray',
'BOHB': 'gold'
}
markers={
'BOHB-DARTS': '^',
'BOHB-PC-DARTS': 'v',
'BOHB-GDAS' : 'x',
'RS': 'D',
'RE': 'o',
'RL': 's',
'SMAC': 'h',
'HB': '>',
'BOHB': '*',
'TPE': '<'
}
def get_incumbent(losses, time_stamps):
return_dict = {'time_stamps': [],
'losses': [],
}
current_incumbent = float('inf')
incumbent_budget = -float('inf')
for l, t in zip(losses, time_stamps):
if l < current_incumbent:
current_incumbent = l
return_dict['losses'].append(l)
return_dict['time_stamps'].append(t)
else:
return_dict['losses'].append(return_dict['losses'][-1])
return_dict['time_stamps'].append(t)
return return_dict.values()
def get_trajectories(args, global_min, path='regularized_evolution',
methods=['RE', 'RS']):
all_trajectories = {}
for m in methods:
dfs = []
for seed in range(500):
filename = os.path.join(path, m,
'algo_{}_0_ssp_{}_seed_{}.obj'.format(m, args.space,
seed))
try:
with open(filename, 'rb') as f:
data = pickle.load(f)
losses = [1 - x.test_accuracy - global_min for x in data]
times = np.array([x.training_time for x in data])
times = [np.sum(times[:i+1]) for i in range(len(times))]
if m in ['HB', 'BOHB']:
costs = np.array([x.budget for x in data])
costs = np.array(
[np.sum(costs[:i+1]) for i in range(len(costs))]
)
n = len(np.where(costs <= 280*108)[0])
times, losses = get_incumbent(losses[:n], times[:n])
else:
times, losses = get_incumbent(losses, times)
print(seed, ' MIN: ', min(losses))
df = pd.DataFrame({str(seed): losses}, index=times)
#embed()
dfs.append(df)
except FileNotFoundError:
break
df = merge_and_fill_trajectories(dfs, default_value=None)
if df.empty:
continue
print(m, df.shape)
all_trajectories[m] = {
'time_stamps': np.array(df.index),
'losses': np.array(df.T)
}
return all_trajectories
def merge_and_fill_trajectories(pandas_data_frames, default_value=None):
    # merge all trajectories keeping all time steps
df = pd.DataFrame().join(pandas_data_frames, how='outer')
    # forward fill to make it a proper step function
df=df.fillna(method='ffill')
if default_value is None:
# backward fill to replace the NaNs for the early times by
# the performance of a random configuration
df=df.fillna(method='bfill')
else:
df=df.fillna(default_value)
return(df)
def plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,
incumbent=None, show=True, linewidth=3, marker_size=10,
xscale='log', xlabel='wall clock time [s]', yscale='log',
ylabel=None, legend_loc = 'best', xlim=None, ylim=None,
plot_mean=True, labels={}, markers=markers, colors=colors,
figsize=(16,9)):
if regret:
if ylabel is None: ylabel = 'regret'
# find lowest performance in the data to update incumbent
if incumbent is None:
incumbent = np.inf
for tr in incumbent_trajectories.values():
incumbent = min(tr['losses'][:,-1].min(), incumbent)
print('incumbent value: ', incumbent)
for m,tr in incumbent_trajectories.items():
trajectory = np.copy(tr['losses'])
if (trajectory.shape[0] == 0): continue
if regret: trajectory -= incumbent
sem = np.sqrt(trajectory.var(axis=0, ddof=1)/tr['losses'].shape[0])
if plot_mean:
mean = trajectory.mean(axis=0)
else:
mean = np.median(trajectory,axis=0)
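            # standard error of the median is roughly 1.253x that of the mean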
sem *= 1.253
if 'DARTS' in m or 'GDAS' in m:
ax.fill_between(tr['time_stamps'], mean-2*sem, mean+2*sem,
color=colors[m], alpha=0.2)
ax.plot(tr['time_stamps'],mean,
label=labels.get(m, m), color=colors.get(m, None),linewidth=linewidth,
marker=markers.get(m,None), markersize=marker_size, markevery=(0.1,0.1))
if axins is not None:
axins.plot(tr['time_stamps'],mean,
label=labels.get(m, m), color=colors.get(m, None),linewidth=linewidth,
marker=markers.get(m,None), markersize=marker_size, markevery=(0.1,0.1))
return (fig, ax)
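# Minimal wiring sketch (assumptions: an `args` object exposing `.space`, an experiment
# directory 'experiments/discrete_optimizers', and a known global minimum; none of these
# names come from this file).
def _example_plot_trajectories(args, global_min=0.0):
    fig, ax = plt.subplots(1, figsize=(8, 5))
    trajectories = get_trajectories(args, global_min,
                                    path='experiments/discrete_optimizers',
                                    methods=['RE', 'RS', 'TPE'])
    plot_losses(fig, ax, None, trajectories, regret=True, plot_mean=True)
    ax.set_xscale('log')
    ax.set_yscale('log')
    ax.legend(loc='best')
    return fig, ax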
|
normal
|
{
"blob_id": "a757bbb9ad2f6f5bf04cdf4091b97841b8e40432",
"index": 6601,
"step-1": "<mask token>\n\n\ndef get_trajectories(args, global_min, path='regularized_evolution',\n methods=['RE', 'RS']):\n all_trajectories = {}\n for m in methods:\n dfs = []\n for seed in range(500):\n filename = os.path.join(path, m, 'algo_{}_0_ssp_{}_seed_{}.obj'\n .format(m, args.space, seed))\n try:\n with open(filename, 'rb') as f:\n data = pickle.load(f)\n losses = [(1 - x.test_accuracy - global_min) for x in data]\n times = np.array([x.training_time for x in data])\n times = [np.sum(times[:i + 1]) for i in range(len(times))]\n if m in ['HB', 'BOHB']:\n costs = np.array([x.budget for x in data])\n costs = np.array([np.sum(costs[:i + 1]) for i in\n range(len(costs))])\n n = len(np.where(costs <= 280 * 108)[0])\n times, losses = get_incumbent(losses[:n], times[:n])\n else:\n times, losses = get_incumbent(losses, times)\n print(seed, ' MIN: ', min(losses))\n df = pd.DataFrame({str(seed): losses}, index=times)\n dfs.append(df)\n except FileNotFoundError:\n break\n df = merge_and_fill_trajectories(dfs, default_value=None)\n if df.empty:\n continue\n print(m, df.shape)\n all_trajectories[m] = {'time_stamps': np.array(df.index), 'losses':\n np.array(df.T)}\n return all_trajectories\n\n\ndef merge_and_fill_trajectories(pandas_data_frames, default_value=None):\n df = pd.DataFrame().join(pandas_data_frames, how='outer')\n df = df.fillna(method='ffill')\n if default_value is None:\n df = df.fillna(method='bfill')\n else:\n df = df.fillna(default_value)\n return df\n\n\ndef plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,\n incumbent=None, show=True, linewidth=3, marker_size=10, xscale='log',\n xlabel='wall clock time [s]', yscale='log', ylabel=None, legend_loc=\n 'best', xlim=None, ylim=None, plot_mean=True, labels={}, markers=\n markers, colors=colors, figsize=(16, 9)):\n if regret:\n if ylabel is None:\n ylabel = 'regret'\n if incumbent is None:\n incumbent = np.inf\n for tr in incumbent_trajectories.values():\n incumbent = min(tr['losses'][:, -1].min(), incumbent)\n print('incumbent value: ', incumbent)\n for m, tr in incumbent_trajectories.items():\n trajectory = np.copy(tr['losses'])\n if trajectory.shape[0] == 0:\n continue\n if regret:\n trajectory -= incumbent\n sem = np.sqrt(trajectory.var(axis=0, ddof=1) / tr['losses'].shape[0])\n if plot_mean:\n mean = trajectory.mean(axis=0)\n else:\n mean = np.median(trajectory, axis=0)\n sem *= 1.253\n if 'DARTS' in m or 'GDAS' in m:\n ax.fill_between(tr['time_stamps'], mean - 2 * sem, mean + 2 *\n sem, color=colors[m], alpha=0.2)\n ax.plot(tr['time_stamps'], mean, label=labels.get(m, m), color=\n colors.get(m, None), linewidth=linewidth, marker=markers.get(m,\n None), markersize=marker_size, markevery=(0.1, 0.1))\n if axins is not None:\n axins.plot(tr['time_stamps'], mean, label=labels.get(m, m),\n color=colors.get(m, None), linewidth=linewidth, marker=\n markers.get(m, None), markersize=marker_size, markevery=(\n 0.1, 0.1))\n return fig, ax\n",
"step-2": "<mask token>\n\n\ndef get_incumbent(losses, time_stamps):\n return_dict = {'time_stamps': [], 'losses': []}\n current_incumbent = float('inf')\n incumbent_budget = -float('inf')\n for l, t in zip(losses, time_stamps):\n if l < current_incumbent:\n current_incumbent = l\n return_dict['losses'].append(l)\n return_dict['time_stamps'].append(t)\n else:\n return_dict['losses'].append(return_dict['losses'][-1])\n return_dict['time_stamps'].append(t)\n return return_dict.values()\n\n\ndef get_trajectories(args, global_min, path='regularized_evolution',\n methods=['RE', 'RS']):\n all_trajectories = {}\n for m in methods:\n dfs = []\n for seed in range(500):\n filename = os.path.join(path, m, 'algo_{}_0_ssp_{}_seed_{}.obj'\n .format(m, args.space, seed))\n try:\n with open(filename, 'rb') as f:\n data = pickle.load(f)\n losses = [(1 - x.test_accuracy - global_min) for x in data]\n times = np.array([x.training_time for x in data])\n times = [np.sum(times[:i + 1]) for i in range(len(times))]\n if m in ['HB', 'BOHB']:\n costs = np.array([x.budget for x in data])\n costs = np.array([np.sum(costs[:i + 1]) for i in\n range(len(costs))])\n n = len(np.where(costs <= 280 * 108)[0])\n times, losses = get_incumbent(losses[:n], times[:n])\n else:\n times, losses = get_incumbent(losses, times)\n print(seed, ' MIN: ', min(losses))\n df = pd.DataFrame({str(seed): losses}, index=times)\n dfs.append(df)\n except FileNotFoundError:\n break\n df = merge_and_fill_trajectories(dfs, default_value=None)\n if df.empty:\n continue\n print(m, df.shape)\n all_trajectories[m] = {'time_stamps': np.array(df.index), 'losses':\n np.array(df.T)}\n return all_trajectories\n\n\ndef merge_and_fill_trajectories(pandas_data_frames, default_value=None):\n df = pd.DataFrame().join(pandas_data_frames, how='outer')\n df = df.fillna(method='ffill')\n if default_value is None:\n df = df.fillna(method='bfill')\n else:\n df = df.fillna(default_value)\n return df\n\n\ndef plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,\n incumbent=None, show=True, linewidth=3, marker_size=10, xscale='log',\n xlabel='wall clock time [s]', yscale='log', ylabel=None, legend_loc=\n 'best', xlim=None, ylim=None, plot_mean=True, labels={}, markers=\n markers, colors=colors, figsize=(16, 9)):\n if regret:\n if ylabel is None:\n ylabel = 'regret'\n if incumbent is None:\n incumbent = np.inf\n for tr in incumbent_trajectories.values():\n incumbent = min(tr['losses'][:, -1].min(), incumbent)\n print('incumbent value: ', incumbent)\n for m, tr in incumbent_trajectories.items():\n trajectory = np.copy(tr['losses'])\n if trajectory.shape[0] == 0:\n continue\n if regret:\n trajectory -= incumbent\n sem = np.sqrt(trajectory.var(axis=0, ddof=1) / tr['losses'].shape[0])\n if plot_mean:\n mean = trajectory.mean(axis=0)\n else:\n mean = np.median(trajectory, axis=0)\n sem *= 1.253\n if 'DARTS' in m or 'GDAS' in m:\n ax.fill_between(tr['time_stamps'], mean - 2 * sem, mean + 2 *\n sem, color=colors[m], alpha=0.2)\n ax.plot(tr['time_stamps'], mean, label=labels.get(m, m), color=\n colors.get(m, None), linewidth=linewidth, marker=markers.get(m,\n None), markersize=marker_size, markevery=(0.1, 0.1))\n if axins is not None:\n axins.plot(tr['time_stamps'], mean, label=labels.get(m, m),\n color=colors.get(m, None), linewidth=linewidth, marker=\n markers.get(m, None), markersize=marker_size, markevery=(\n 0.1, 0.1))\n return fig, ax\n",
"step-3": "<mask token>\ncolors = {'BOHB-PC-DARTS': 'darkorange', 'BOHB-DARTS': 'dodgerblue',\n 'BOHB-GDAS': 'forestgreen', 'RE': 'crimson', 'RS': 'darkorchid', 'RL':\n 'sienna', 'TPE': 'deepskyblue', 'SMAC': 'violet', 'HB': 'darkgray',\n 'BOHB': 'gold'}\nmarkers = {'BOHB-DARTS': '^', 'BOHB-PC-DARTS': 'v', 'BOHB-GDAS': 'x', 'RS':\n 'D', 'RE': 'o', 'RL': 's', 'SMAC': 'h', 'HB': '>', 'BOHB': '*', 'TPE': '<'}\n\n\ndef get_incumbent(losses, time_stamps):\n return_dict = {'time_stamps': [], 'losses': []}\n current_incumbent = float('inf')\n incumbent_budget = -float('inf')\n for l, t in zip(losses, time_stamps):\n if l < current_incumbent:\n current_incumbent = l\n return_dict['losses'].append(l)\n return_dict['time_stamps'].append(t)\n else:\n return_dict['losses'].append(return_dict['losses'][-1])\n return_dict['time_stamps'].append(t)\n return return_dict.values()\n\n\ndef get_trajectories(args, global_min, path='regularized_evolution',\n methods=['RE', 'RS']):\n all_trajectories = {}\n for m in methods:\n dfs = []\n for seed in range(500):\n filename = os.path.join(path, m, 'algo_{}_0_ssp_{}_seed_{}.obj'\n .format(m, args.space, seed))\n try:\n with open(filename, 'rb') as f:\n data = pickle.load(f)\n losses = [(1 - x.test_accuracy - global_min) for x in data]\n times = np.array([x.training_time for x in data])\n times = [np.sum(times[:i + 1]) for i in range(len(times))]\n if m in ['HB', 'BOHB']:\n costs = np.array([x.budget for x in data])\n costs = np.array([np.sum(costs[:i + 1]) for i in\n range(len(costs))])\n n = len(np.where(costs <= 280 * 108)[0])\n times, losses = get_incumbent(losses[:n], times[:n])\n else:\n times, losses = get_incumbent(losses, times)\n print(seed, ' MIN: ', min(losses))\n df = pd.DataFrame({str(seed): losses}, index=times)\n dfs.append(df)\n except FileNotFoundError:\n break\n df = merge_and_fill_trajectories(dfs, default_value=None)\n if df.empty:\n continue\n print(m, df.shape)\n all_trajectories[m] = {'time_stamps': np.array(df.index), 'losses':\n np.array(df.T)}\n return all_trajectories\n\n\ndef merge_and_fill_trajectories(pandas_data_frames, default_value=None):\n df = pd.DataFrame().join(pandas_data_frames, how='outer')\n df = df.fillna(method='ffill')\n if default_value is None:\n df = df.fillna(method='bfill')\n else:\n df = df.fillna(default_value)\n return df\n\n\ndef plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,\n incumbent=None, show=True, linewidth=3, marker_size=10, xscale='log',\n xlabel='wall clock time [s]', yscale='log', ylabel=None, legend_loc=\n 'best', xlim=None, ylim=None, plot_mean=True, labels={}, markers=\n markers, colors=colors, figsize=(16, 9)):\n if regret:\n if ylabel is None:\n ylabel = 'regret'\n if incumbent is None:\n incumbent = np.inf\n for tr in incumbent_trajectories.values():\n incumbent = min(tr['losses'][:, -1].min(), incumbent)\n print('incumbent value: ', incumbent)\n for m, tr in incumbent_trajectories.items():\n trajectory = np.copy(tr['losses'])\n if trajectory.shape[0] == 0:\n continue\n if regret:\n trajectory -= incumbent\n sem = np.sqrt(trajectory.var(axis=0, ddof=1) / tr['losses'].shape[0])\n if plot_mean:\n mean = trajectory.mean(axis=0)\n else:\n mean = np.median(trajectory, axis=0)\n sem *= 1.253\n if 'DARTS' in m or 'GDAS' in m:\n ax.fill_between(tr['time_stamps'], mean - 2 * sem, mean + 2 *\n sem, color=colors[m], alpha=0.2)\n ax.plot(tr['time_stamps'], mean, label=labels.get(m, m), color=\n colors.get(m, None), linewidth=linewidth, marker=markers.get(m,\n None), markersize=marker_size, 
markevery=(0.1, 0.1))\n if axins is not None:\n axins.plot(tr['time_stamps'], mean, label=labels.get(m, m),\n color=colors.get(m, None), linewidth=linewidth, marker=\n markers.get(m, None), markersize=marker_size, markevery=(\n 0.1, 0.1))\n return fig, ax\n",
"step-4": "import os\nimport pickle\nimport collections\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom IPython import embed\nfrom optimizers.utils_1 import Model_1, Architecture_1\nfrom optimizers.utils import Model, Architecture\ncolors = {'BOHB-PC-DARTS': 'darkorange', 'BOHB-DARTS': 'dodgerblue',\n 'BOHB-GDAS': 'forestgreen', 'RE': 'crimson', 'RS': 'darkorchid', 'RL':\n 'sienna', 'TPE': 'deepskyblue', 'SMAC': 'violet', 'HB': 'darkgray',\n 'BOHB': 'gold'}\nmarkers = {'BOHB-DARTS': '^', 'BOHB-PC-DARTS': 'v', 'BOHB-GDAS': 'x', 'RS':\n 'D', 'RE': 'o', 'RL': 's', 'SMAC': 'h', 'HB': '>', 'BOHB': '*', 'TPE': '<'}\n\n\ndef get_incumbent(losses, time_stamps):\n return_dict = {'time_stamps': [], 'losses': []}\n current_incumbent = float('inf')\n incumbent_budget = -float('inf')\n for l, t in zip(losses, time_stamps):\n if l < current_incumbent:\n current_incumbent = l\n return_dict['losses'].append(l)\n return_dict['time_stamps'].append(t)\n else:\n return_dict['losses'].append(return_dict['losses'][-1])\n return_dict['time_stamps'].append(t)\n return return_dict.values()\n\n\ndef get_trajectories(args, global_min, path='regularized_evolution',\n methods=['RE', 'RS']):\n all_trajectories = {}\n for m in methods:\n dfs = []\n for seed in range(500):\n filename = os.path.join(path, m, 'algo_{}_0_ssp_{}_seed_{}.obj'\n .format(m, args.space, seed))\n try:\n with open(filename, 'rb') as f:\n data = pickle.load(f)\n losses = [(1 - x.test_accuracy - global_min) for x in data]\n times = np.array([x.training_time for x in data])\n times = [np.sum(times[:i + 1]) for i in range(len(times))]\n if m in ['HB', 'BOHB']:\n costs = np.array([x.budget for x in data])\n costs = np.array([np.sum(costs[:i + 1]) for i in\n range(len(costs))])\n n = len(np.where(costs <= 280 * 108)[0])\n times, losses = get_incumbent(losses[:n], times[:n])\n else:\n times, losses = get_incumbent(losses, times)\n print(seed, ' MIN: ', min(losses))\n df = pd.DataFrame({str(seed): losses}, index=times)\n dfs.append(df)\n except FileNotFoundError:\n break\n df = merge_and_fill_trajectories(dfs, default_value=None)\n if df.empty:\n continue\n print(m, df.shape)\n all_trajectories[m] = {'time_stamps': np.array(df.index), 'losses':\n np.array(df.T)}\n return all_trajectories\n\n\ndef merge_and_fill_trajectories(pandas_data_frames, default_value=None):\n df = pd.DataFrame().join(pandas_data_frames, how='outer')\n df = df.fillna(method='ffill')\n if default_value is None:\n df = df.fillna(method='bfill')\n else:\n df = df.fillna(default_value)\n return df\n\n\ndef plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,\n incumbent=None, show=True, linewidth=3, marker_size=10, xscale='log',\n xlabel='wall clock time [s]', yscale='log', ylabel=None, legend_loc=\n 'best', xlim=None, ylim=None, plot_mean=True, labels={}, markers=\n markers, colors=colors, figsize=(16, 9)):\n if regret:\n if ylabel is None:\n ylabel = 'regret'\n if incumbent is None:\n incumbent = np.inf\n for tr in incumbent_trajectories.values():\n incumbent = min(tr['losses'][:, -1].min(), incumbent)\n print('incumbent value: ', incumbent)\n for m, tr in incumbent_trajectories.items():\n trajectory = np.copy(tr['losses'])\n if trajectory.shape[0] == 0:\n continue\n if regret:\n trajectory -= incumbent\n sem = np.sqrt(trajectory.var(axis=0, ddof=1) / tr['losses'].shape[0])\n if plot_mean:\n mean = trajectory.mean(axis=0)\n else:\n mean = np.median(trajectory, axis=0)\n sem *= 1.253\n if 'DARTS' in m or 'GDAS' in m:\n 
ax.fill_between(tr['time_stamps'], mean - 2 * sem, mean + 2 *\n sem, color=colors[m], alpha=0.2)\n ax.plot(tr['time_stamps'], mean, label=labels.get(m, m), color=\n colors.get(m, None), linewidth=linewidth, marker=markers.get(m,\n None), markersize=marker_size, markevery=(0.1, 0.1))\n if axins is not None:\n axins.plot(tr['time_stamps'], mean, label=labels.get(m, m),\n color=colors.get(m, None), linewidth=linewidth, marker=\n markers.get(m, None), markersize=marker_size, markevery=(\n 0.1, 0.1))\n return fig, ax\n",
"step-5": "import os\nimport pickle\nimport collections\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom IPython import embed\n\nfrom optimizers.utils_1 import Model_1, Architecture_1\nfrom optimizers.utils import Model, Architecture\n\ncolors={\n 'BOHB-PC-DARTS': 'darkorange',\n 'BOHB-DARTS': 'dodgerblue',\n 'BOHB-GDAS' : 'forestgreen',\n 'RE': 'crimson',\n\t\t'RS': 'darkorchid',\n\t\t'RL': 'sienna',\n\t\t'TPE': 'deepskyblue',\n 'SMAC': 'violet',\n 'HB': 'darkgray',\n 'BOHB': 'gold'\n}\n\nmarkers={\n 'BOHB-DARTS': '^',\n 'BOHB-PC-DARTS': 'v',\n 'BOHB-GDAS' : 'x',\n 'RS': 'D',\n\t\t'RE': 'o',\n\t\t'RL': 's',\n\t\t'SMAC': 'h',\n 'HB': '>',\n 'BOHB': '*',\n 'TPE': '<'\n}\n\n\ndef get_incumbent(losses, time_stamps):\n return_dict = {'time_stamps': [],\n 'losses': [],\n }\n\n current_incumbent = float('inf')\n incumbent_budget = -float('inf')\n\n for l, t in zip(losses, time_stamps):\n if l < current_incumbent:\n current_incumbent = l\n return_dict['losses'].append(l)\n return_dict['time_stamps'].append(t)\n else:\n return_dict['losses'].append(return_dict['losses'][-1])\n return_dict['time_stamps'].append(t)\n return return_dict.values()\n\n\ndef get_trajectories(args, global_min, path='regularized_evolution',\n methods=['RE', 'RS']):\n all_trajectories = {}\n for m in methods:\n dfs = []\n for seed in range(500):\n filename = os.path.join(path, m,\n 'algo_{}_0_ssp_{}_seed_{}.obj'.format(m, args.space,\n seed))\n try:\n with open(filename, 'rb') as f:\n data = pickle.load(f)\n losses = [1 - x.test_accuracy - global_min for x in data]\n times = np.array([x.training_time for x in data])\n times = [np.sum(times[:i+1]) for i in range(len(times))]\n if m in ['HB', 'BOHB']:\n costs = np.array([x.budget for x in data])\n costs = np.array(\n [np.sum(costs[:i+1]) for i in range(len(costs))]\n )\n n = len(np.where(costs <= 280*108)[0])\n times, losses = get_incumbent(losses[:n], times[:n])\n else:\n times, losses = get_incumbent(losses, times)\n print(seed, ' MIN: ', min(losses))\n df = pd.DataFrame({str(seed): losses}, index=times)\n #embed()\n dfs.append(df)\n except FileNotFoundError:\n break\n df = merge_and_fill_trajectories(dfs, default_value=None)\n if df.empty:\n continue\n print(m, df.shape)\n\n all_trajectories[m] = {\n 'time_stamps': np.array(df.index),\n 'losses': np.array(df.T)\n }\n\n return all_trajectories\n\n\ndef merge_and_fill_trajectories(pandas_data_frames, default_value=None):\n\t# merge all tracjectories keeping all time steps\n\tdf = pd.DataFrame().join(pandas_data_frames, how='outer')\n\n\t# forward fill to make it a propper step function\n\tdf=df.fillna(method='ffill')\n\n\tif default_value is None:\n\t# backward fill to replace the NaNs for the early times by\n\t# the performance of a random configuration\n\t\tdf=df.fillna(method='bfill')\n\telse:\n\t\tdf=df.fillna(default_value)\n\n\treturn(df)\n\n\ndef plot_losses(fig, ax, axins, incumbent_trajectories, regret=True,\n incumbent=None, show=True, linewidth=3, marker_size=10,\n xscale='log', xlabel='wall clock time [s]', yscale='log',\n ylabel=None, legend_loc = 'best', xlim=None, ylim=None,\n plot_mean=True, labels={}, markers=markers, colors=colors,\n figsize=(16,9)):\n\n if regret:\n if ylabel is None: ylabel = 'regret'\n\t\t# find lowest performance in the data to update incumbent\n\n if incumbent is None:\n incumbent = np.inf\n for tr in incumbent_trajectories.values():\n incumbent = min(tr['losses'][:,-1].min(), incumbent)\n print('incumbent value: ', incumbent)\n\n for m,tr in 
incumbent_trajectories.items():\n trajectory = np.copy(tr['losses'])\n if (trajectory.shape[0] == 0): continue\n if regret: trajectory -= incumbent\n\n sem = np.sqrt(trajectory.var(axis=0, ddof=1)/tr['losses'].shape[0])\n if plot_mean:\n mean = trajectory.mean(axis=0)\n else:\n mean = np.median(trajectory,axis=0)\n sem *= 1.253\n\n if 'DARTS' in m or 'GDAS' in m:\n ax.fill_between(tr['time_stamps'], mean-2*sem, mean+2*sem,\n color=colors[m], alpha=0.2)\n\n ax.plot(tr['time_stamps'],mean,\n label=labels.get(m, m), color=colors.get(m, None),linewidth=linewidth,\n marker=markers.get(m,None), markersize=marker_size, markevery=(0.1,0.1))\n\n if axins is not None:\n axins.plot(tr['time_stamps'],mean,\n label=labels.get(m, m), color=colors.get(m, None),linewidth=linewidth,\n marker=markers.get(m,None), markersize=marker_size, markevery=(0.1,0.1))\n\n return (fig, ax)\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import os
from apps.app_base.app_utils.cryp_key import decrypt, get_secret_key
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = get_secret_key
DEBUG = True
ALLOWED_HOSTS = ['.localhost', '127.0.0.1', '[::1]']
# Application definition
INSTALLED_APPS = [
'corsheaders',
'django.contrib.sessions',
]
MIDDLEWARE = [
# CORS
'corsheaders.middleware.CorsMiddleware',
# Session
'django.contrib.sessions.middleware.SessionMiddleware',
# Cache
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
]
ROOT_URLCONF = 'apps.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'apps.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'luck',
'USER': 'postgres',
'PASSWORD': decrypt(b'gAAAAABfesT5OW3keTFXv6sUP_4NWJfG6U_ZEInkmCvJGdVSNA74VPJeG3lZLky8ZWEsjLsdxe_k_vgVCSIVCoTx1hOQsTb1kw=='),
'HOST': '127.0.0.1',
'PORT': '5432'
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
CACHES = {
# Local Memory Cache https://docs.djangoproject.com/en/3.1/topics/cache/
"default": {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'local-memory-lru',
},
"redis": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/0", # db0
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"CONNECTION_POOL_KWARGS": {"max_connections": 100}
}
}
}
# Use Redis for session
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
SESSION_CACHE_ALIAS = "redis"
SESSION_COOKIE_AGE = 3600 * 24 # In seconds
STATIC_URL = '/static/'
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True
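# Usage note (illustrative, not part of the settings): with the CACHES block above,
# application code can address either backend via Django's cache registry, e.g.
#   from django.core.cache import caches
#   caches['redis'].set('greeting', 'hello', timeout=60)  # stored in Redis db0
#   caches['default'].get('greeting')                      # local-memory LRU cache
# Sessions go through the 'redis' alias because of SESSION_CACHE_ALIAS above.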
|
normal
|
{
"blob_id": "027a049ffced721f2cd697bc928bfdf718630623",
"index": 4692,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nSECRET_KEY = get_secret_key\nDEBUG = True\nALLOWED_HOSTS = ['.localhost', '127.0.0.1', '[::1]']\nINSTALLED_APPS = ['corsheaders', 'django.contrib.sessions']\nMIDDLEWARE = ['corsheaders.middleware.CorsMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.cache.UpdateCacheMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.cache.FetchFromCacheMiddleware']\nROOT_URLCONF = 'apps.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\nWSGI_APPLICATION = 'apps.wsgi.application'\nDATABASES = {'default': {'ENGINE': 'django.db.backends.postgresql', 'NAME':\n 'luck', 'USER': 'postgres', 'PASSWORD': decrypt(\n b'gAAAAABfesT5OW3keTFXv6sUP_4NWJfG6U_ZEInkmCvJGdVSNA74VPJeG3lZLky8ZWEsjLsdxe_k_vgVCSIVCoTx1hOQsTb1kw=='\n ), 'HOST': '127.0.0.1', 'PORT': '5432'}}\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nCACHES = {'default': {'BACKEND':\n 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION':\n 'local-memory-lru'}, 'redis': {'BACKEND':\n 'django_redis.cache.RedisCache', 'LOCATION': 'redis://127.0.0.1:6379/0',\n 'OPTIONS': {'CLIENT_CLASS': 'django_redis.client.DefaultClient',\n 'CONNECTION_POOL_KWARGS': {'max_connections': 100}}}}\nSESSION_ENGINE = 'django.contrib.sessions.backends.cache'\nSESSION_CACHE_ALIAS = 'redis'\nSESSION_COOKIE_AGE = 3600 * 24\nSTATIC_URL = '/static/'\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_ALLOW_CREDENTIALS = True\n",
"step-3": "import os\nfrom apps.app_base.app_utils.cryp_key import decrypt, get_secret_key\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nSECRET_KEY = get_secret_key\nDEBUG = True\nALLOWED_HOSTS = ['.localhost', '127.0.0.1', '[::1]']\nINSTALLED_APPS = ['corsheaders', 'django.contrib.sessions']\nMIDDLEWARE = ['corsheaders.middleware.CorsMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.cache.UpdateCacheMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.cache.FetchFromCacheMiddleware']\nROOT_URLCONF = 'apps.urls'\nTEMPLATES = [{'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': {'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages']}}]\nWSGI_APPLICATION = 'apps.wsgi.application'\nDATABASES = {'default': {'ENGINE': 'django.db.backends.postgresql', 'NAME':\n 'luck', 'USER': 'postgres', 'PASSWORD': decrypt(\n b'gAAAAABfesT5OW3keTFXv6sUP_4NWJfG6U_ZEInkmCvJGdVSNA74VPJeG3lZLky8ZWEsjLsdxe_k_vgVCSIVCoTx1hOQsTb1kw=='\n ), 'HOST': '127.0.0.1', 'PORT': '5432'}}\nAUTH_PASSWORD_VALIDATORS = [{'NAME':\n 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'\n }, {'NAME':\n 'django.contrib.auth.password_validation.MinimumLengthValidator'}, {\n 'NAME':\n 'django.contrib.auth.password_validation.CommonPasswordValidator'}, {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}\n ]\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nCACHES = {'default': {'BACKEND':\n 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION':\n 'local-memory-lru'}, 'redis': {'BACKEND':\n 'django_redis.cache.RedisCache', 'LOCATION': 'redis://127.0.0.1:6379/0',\n 'OPTIONS': {'CLIENT_CLASS': 'django_redis.client.DefaultClient',\n 'CONNECTION_POOL_KWARGS': {'max_connections': 100}}}}\nSESSION_ENGINE = 'django.contrib.sessions.backends.cache'\nSESSION_CACHE_ALIAS = 'redis'\nSESSION_COOKIE_AGE = 3600 * 24\nSTATIC_URL = '/static/'\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_ALLOW_CREDENTIALS = True\n",
"step-4": "import os\nfrom apps.app_base.app_utils.cryp_key import decrypt, get_secret_key\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\nSECRET_KEY = get_secret_key\n\nDEBUG = True\n\nALLOWED_HOSTS = ['.localhost', '127.0.0.1', '[::1]']\n\n\n# Application definition\n\nINSTALLED_APPS = [\n 'corsheaders',\n 'django.contrib.sessions',\n]\n\nMIDDLEWARE = [\n # CORS\n 'corsheaders.middleware.CorsMiddleware',\n # Session\n 'django.contrib.sessions.middleware.SessionMiddleware',\n # Cache\n 'django.middleware.cache.UpdateCacheMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.cache.FetchFromCacheMiddleware',\n]\n\nROOT_URLCONF = 'apps.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'apps.wsgi.application'\n\n\n# Database\n# https://docs.djangoproject.com/en/3.0/ref/settings/#databases\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql',\n 'NAME': 'luck',\n 'USER': 'postgres',\n 'PASSWORD': decrypt(b'gAAAAABfesT5OW3keTFXv6sUP_4NWJfG6U_ZEInkmCvJGdVSNA74VPJeG3lZLky8ZWEsjLsdxe_k_vgVCSIVCoTx1hOQsTb1kw=='),\n 'HOST': '127.0.0.1',\n 'PORT': '5432'\n }\n}\n\n\n# Password validation\n# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\n\n# Internationalization\n# https://docs.djangoproject.com/en/3.0/topics/i18n/\n\nLANGUAGE_CODE = 'en-us'\n\nTIME_ZONE = 'UTC'\n\nUSE_I18N = True\n\nUSE_L10N = True\n\nUSE_TZ = True\n\n\nCACHES = {\n # Local Memory Cache https://docs.djangoproject.com/en/3.1/topics/cache/\n \"default\": {\n 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',\n 'LOCATION': 'local-memory-lru',\n },\n \"redis\": {\n \"BACKEND\": \"django_redis.cache.RedisCache\",\n \"LOCATION\": \"redis://127.0.0.1:6379/0\", # db0\n \"OPTIONS\": {\n \"CLIENT_CLASS\": \"django_redis.client.DefaultClient\",\n \"CONNECTION_POOL_KWARGS\": {\"max_connections\": 100}\n }\n }\n}\n\n# Use Redis for session\nSESSION_ENGINE = \"django.contrib.sessions.backends.cache\"\nSESSION_CACHE_ALIAS = \"redis\"\nSESSION_COOKIE_AGE = 3600 * 24 # In seconds\n\n\nSTATIC_URL = '/static/'\n\nCORS_ORIGIN_ALLOW_ALL = True\nCORS_ALLOW_CREDENTIALS = True\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2014-2016 by Mike Taylor
:license: MIT, see LICENSE for more details.
Micropub Tools
"""
import requests
from bs4 import BeautifulSoup, SoupStrainer
try: # Python v3
from urllib.parse import urlparse, urljoin
except ImportError:
from urlparse import urlparse, urljoin
import ronkyuu
_html_parser = 'lxml' # 'html.parser', 'lxml', 'lxml-xml', 'html5lib'
def setParser(htmlParser='html5lib'):
global _html_parser
_html_parser = htmlParser
# find an endpoint
# look in the headers of the given domain for an HTTP Link header
# if not found, look for an HTML <link> element in the page returned from the given domain
def discoverEndpoint(domain, endpoint, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):
"""Find the given endpoint for the given domain.
    Only scan HTML elements matching all criteria in look_in.
optionally the content to be scanned can be given as an argument.
:param domain: the URL of the domain to handle
:param endpoint: list of endpoints to look for
:param content: the content to be scanned for the endpoint
    :param look_in: dictionary with name, id and class_. Only elements matching all of these will be scanned
:param test_urls: optional flag to test URLs for validation
:param validateCerts: optional flag to enforce HTTPS certificates if present
:rtype: list of endpoints
"""
if test_urls:
ronkyuu.URLValidator(message='invalid domain URL')(domain)
if content:
result = {'status': requests.codes.ok,
'headers': None,
'content': content
}
else:
r = requests.get(domain, verify=validateCerts)
result = {'status': r.status_code,
'headers': r.headers
}
# check for character encodings and use 'correct' data
if 'charset' in r.headers.get('content-type', ''):
result['content'] = r.text
else:
result['content'] = r.content
for key in endpoint:
result.update({key: set()})
result.update({'domain': domain})
if result['status'] == requests.codes.ok:
        # use the headers stored in result so this branch also works when `content` was passed in
        if result['headers'] and 'link' in result['headers']:
            all_links = result['headers']['link'].split(',', 1)
for link in all_links:
if ';' in link:
href, rel = link.split(';')
url = urlparse(href.strip()[1:-1])
if url.scheme in ('http', 'https') and rel in endpoint:
result[rel].add(url)
all_links = BeautifulSoup(result['content'], _html_parser, parse_only=SoupStrainer(**look_in)).find_all('link')
for link in all_links:
rel = link.get('rel', None)[0]
if rel in endpoint:
href = link.get('href', None)
if href:
url = urlparse(href)
if url.scheme == '' or url.netloc == '':
url = urlparse(urljoin(domain, href))
if url.scheme in ('http', 'https'):
result[rel].add(url)
return result
def discoverMicropubEndpoints(domain, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):
"""Find the micropub for the given domain.
    Only scan HTML elements matching all criteria in look_in.
optionally the content to be scanned can be given as an argument.
:param domain: the URL of the domain to handle
:param content: the content to be scanned for the endpoint
    :param look_in: dictionary with name, id and class_. Only elements matching all of these will be scanned
:param test_urls: optional flag to test URLs for validation
:param validateCerts: optional flag to enforce HTTPS certificates if present
:rtype: list of endpoints
"""
return discoverEndpoint(domain, ('micropub',), content, look_in, test_urls, validateCerts)
def discoverTokenEndpoints(domain, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):
"""Find the token for the given domain.
    Only scan HTML elements matching all criteria in look_in.
optionally the content to be scanned can be given as an argument.
:param domain: the URL of the domain to handle
:param content: the content to be scanned for the endpoint
    :param look_in: dictionary with name, id and class_. Only elements matching all of these will be scanned
:param test_urls: optional flag to test URLs for validation
:param validateCerts: optional flag to enforce HTTPS certificates if present
:rtype: list of endpoints
"""
return discoverEndpoint(domain, ('token_endpoint',), content, look_in, test_urls, validateCerts)
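# Minimal usage sketch (assumption: 'https://example.com' stands in for a real IndieWeb
# site; this performs a live HTTP request unless `content` is supplied).
if __name__ == '__main__':
    endpoints = discoverMicropubEndpoints('https://example.com')
    print('status:', endpoints['status'])
    print('micropub endpoints:', endpoints['micropub'])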
|
normal
|
{
"blob_id": "1bb82a24faed6079ec161d95eff22aa122295c13",
"index": 3982,
"step-1": "<mask token>\n\n\ndef setParser(htmlParser='html5lib'):\n global _html_parser\n _html_parser = htmlParser\n\n\ndef discoverEndpoint(domain, endpoint, content=None, look_in={'name':\n 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the given endpoint for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param endpoint: list of endpoints to look for\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n if test_urls:\n ronkyuu.URLValidator(message='invalid domain URL')(domain)\n if content:\n result = {'status': requests.codes.ok, 'headers': None, 'content':\n content}\n else:\n r = requests.get(domain, verify=validateCerts)\n result = {'status': r.status_code, 'headers': r.headers}\n if 'charset' in r.headers.get('content-type', ''):\n result['content'] = r.text\n else:\n result['content'] = r.content\n for key in endpoint:\n result.update({key: set()})\n result.update({'domain': domain})\n if result['status'] == requests.codes.ok:\n if 'link' in r.headers:\n all_links = r.headers['link'].split(',', 1)\n for link in all_links:\n if ';' in link:\n href, rel = link.split(';')\n url = urlparse(href.strip()[1:-1])\n if url.scheme in ('http', 'https') and rel in endpoint:\n result[rel].add(url)\n all_links = BeautifulSoup(result['content'], _html_parser,\n parse_only=SoupStrainer(**look_in)).find_all('link')\n for link in all_links:\n rel = link.get('rel', None)[0]\n if rel in endpoint:\n href = link.get('href', None)\n if href:\n url = urlparse(href)\n if url.scheme == '' or url.netloc == '':\n url = urlparse(urljoin(domain, href))\n if url.scheme in ('http', 'https'):\n result[rel].add(url)\n return result\n\n\n<mask token>\n",
"step-2": "<mask token>\ntry:\n from urllib.parse import urlparse, urljoin\nexcept ImportError:\n from urlparse import urlparse, urljoin\n<mask token>\n\n\ndef setParser(htmlParser='html5lib'):\n global _html_parser\n _html_parser = htmlParser\n\n\ndef discoverEndpoint(domain, endpoint, content=None, look_in={'name':\n 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the given endpoint for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param endpoint: list of endpoints to look for\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n if test_urls:\n ronkyuu.URLValidator(message='invalid domain URL')(domain)\n if content:\n result = {'status': requests.codes.ok, 'headers': None, 'content':\n content}\n else:\n r = requests.get(domain, verify=validateCerts)\n result = {'status': r.status_code, 'headers': r.headers}\n if 'charset' in r.headers.get('content-type', ''):\n result['content'] = r.text\n else:\n result['content'] = r.content\n for key in endpoint:\n result.update({key: set()})\n result.update({'domain': domain})\n if result['status'] == requests.codes.ok:\n if 'link' in r.headers:\n all_links = r.headers['link'].split(',', 1)\n for link in all_links:\n if ';' in link:\n href, rel = link.split(';')\n url = urlparse(href.strip()[1:-1])\n if url.scheme in ('http', 'https') and rel in endpoint:\n result[rel].add(url)\n all_links = BeautifulSoup(result['content'], _html_parser,\n parse_only=SoupStrainer(**look_in)).find_all('link')\n for link in all_links:\n rel = link.get('rel', None)[0]\n if rel in endpoint:\n href = link.get('href', None)\n if href:\n url = urlparse(href)\n if url.scheme == '' or url.netloc == '':\n url = urlparse(urljoin(domain, href))\n if url.scheme in ('http', 'https'):\n result[rel].add(url)\n return result\n\n\ndef discoverMicropubEndpoints(domain, content=None, look_in={'name': 'link'\n }, test_urls=True, validateCerts=True):\n \"\"\"Find the micropub for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('micropub',), content, look_in,\n test_urls, validateCerts)\n\n\ndef discoverTokenEndpoints(domain, content=None, look_in={'name': 'link'},\n test_urls=True, validateCerts=True):\n \"\"\"Find the token for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. 
only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('token_endpoint',), content, look_in,\n test_urls, validateCerts)\n",
"step-3": "<mask token>\ntry:\n from urllib.parse import urlparse, urljoin\nexcept ImportError:\n from urlparse import urlparse, urljoin\n<mask token>\n_html_parser = 'lxml'\n\n\ndef setParser(htmlParser='html5lib'):\n global _html_parser\n _html_parser = htmlParser\n\n\ndef discoverEndpoint(domain, endpoint, content=None, look_in={'name':\n 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the given endpoint for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param endpoint: list of endpoints to look for\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n if test_urls:\n ronkyuu.URLValidator(message='invalid domain URL')(domain)\n if content:\n result = {'status': requests.codes.ok, 'headers': None, 'content':\n content}\n else:\n r = requests.get(domain, verify=validateCerts)\n result = {'status': r.status_code, 'headers': r.headers}\n if 'charset' in r.headers.get('content-type', ''):\n result['content'] = r.text\n else:\n result['content'] = r.content\n for key in endpoint:\n result.update({key: set()})\n result.update({'domain': domain})\n if result['status'] == requests.codes.ok:\n if 'link' in r.headers:\n all_links = r.headers['link'].split(',', 1)\n for link in all_links:\n if ';' in link:\n href, rel = link.split(';')\n url = urlparse(href.strip()[1:-1])\n if url.scheme in ('http', 'https') and rel in endpoint:\n result[rel].add(url)\n all_links = BeautifulSoup(result['content'], _html_parser,\n parse_only=SoupStrainer(**look_in)).find_all('link')\n for link in all_links:\n rel = link.get('rel', None)[0]\n if rel in endpoint:\n href = link.get('href', None)\n if href:\n url = urlparse(href)\n if url.scheme == '' or url.netloc == '':\n url = urlparse(urljoin(domain, href))\n if url.scheme in ('http', 'https'):\n result[rel].add(url)\n return result\n\n\ndef discoverMicropubEndpoints(domain, content=None, look_in={'name': 'link'\n }, test_urls=True, validateCerts=True):\n \"\"\"Find the micropub for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('micropub',), content, look_in,\n test_urls, validateCerts)\n\n\ndef discoverTokenEndpoints(domain, content=None, look_in={'name': 'link'},\n test_urls=True, validateCerts=True):\n \"\"\"Find the token for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. 
only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('token_endpoint',), content, look_in,\n test_urls, validateCerts)\n",
"step-4": "<mask token>\nimport requests\nfrom bs4 import BeautifulSoup, SoupStrainer\ntry:\n from urllib.parse import urlparse, urljoin\nexcept ImportError:\n from urlparse import urlparse, urljoin\nimport ronkyuu\n_html_parser = 'lxml'\n\n\ndef setParser(htmlParser='html5lib'):\n global _html_parser\n _html_parser = htmlParser\n\n\ndef discoverEndpoint(domain, endpoint, content=None, look_in={'name':\n 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the given endpoint for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param endpoint: list of endpoints to look for\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n if test_urls:\n ronkyuu.URLValidator(message='invalid domain URL')(domain)\n if content:\n result = {'status': requests.codes.ok, 'headers': None, 'content':\n content}\n else:\n r = requests.get(domain, verify=validateCerts)\n result = {'status': r.status_code, 'headers': r.headers}\n if 'charset' in r.headers.get('content-type', ''):\n result['content'] = r.text\n else:\n result['content'] = r.content\n for key in endpoint:\n result.update({key: set()})\n result.update({'domain': domain})\n if result['status'] == requests.codes.ok:\n if 'link' in r.headers:\n all_links = r.headers['link'].split(',', 1)\n for link in all_links:\n if ';' in link:\n href, rel = link.split(';')\n url = urlparse(href.strip()[1:-1])\n if url.scheme in ('http', 'https') and rel in endpoint:\n result[rel].add(url)\n all_links = BeautifulSoup(result['content'], _html_parser,\n parse_only=SoupStrainer(**look_in)).find_all('link')\n for link in all_links:\n rel = link.get('rel', None)[0]\n if rel in endpoint:\n href = link.get('href', None)\n if href:\n url = urlparse(href)\n if url.scheme == '' or url.netloc == '':\n url = urlparse(urljoin(domain, href))\n if url.scheme in ('http', 'https'):\n result[rel].add(url)\n return result\n\n\ndef discoverMicropubEndpoints(domain, content=None, look_in={'name': 'link'\n }, test_urls=True, validateCerts=True):\n \"\"\"Find the micropub for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. 
only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('micropub',), content, look_in,\n test_urls, validateCerts)\n\n\ndef discoverTokenEndpoints(domain, content=None, look_in={'name': 'link'},\n test_urls=True, validateCerts=True):\n \"\"\"Find the token for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('token_endpoint',), content, look_in,\n test_urls, validateCerts)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\n:copyright: (c) 2014-2016 by Mike Taylor\n:license: MIT, see LICENSE for more details.\n\nMicropub Tools\n\"\"\"\n\nimport requests\nfrom bs4 import BeautifulSoup, SoupStrainer\n\ntry: # Python v3\n from urllib.parse import urlparse, urljoin\nexcept ImportError:\n from urlparse import urlparse, urljoin\n\nimport ronkyuu\n\n\n_html_parser = 'lxml' # 'html.parser', 'lxml', 'lxml-xml', 'html5lib'\n\ndef setParser(htmlParser='html5lib'):\n global _html_parser\n _html_parser = htmlParser\n\n\n# find an endpoint\n# look in headers for given domain for a HTTP Link header\n# if not found, look for an HTML <link> element in page returned from domain given\n\ndef discoverEndpoint(domain, endpoint, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the given endpoint for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param endpoint: list of endpoints to look for\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n if test_urls:\n ronkyuu.URLValidator(message='invalid domain URL')(domain)\n\n if content:\n result = {'status': requests.codes.ok,\n 'headers': None,\n 'content': content\n }\n else:\n r = requests.get(domain, verify=validateCerts)\n result = {'status': r.status_code,\n 'headers': r.headers\n }\n # check for character encodings and use 'correct' data\n if 'charset' in r.headers.get('content-type', ''):\n result['content'] = r.text\n else:\n result['content'] = r.content\n\n for key in endpoint:\n result.update({key: set()})\n result.update({'domain': domain})\n\n if result['status'] == requests.codes.ok:\n if 'link' in r.headers:\n all_links = r.headers['link'].split(',', 1)\n for link in all_links:\n if ';' in link:\n href, rel = link.split(';')\n url = urlparse(href.strip()[1:-1])\n if url.scheme in ('http', 'https') and rel in endpoint:\n result[rel].add(url)\n\n all_links = BeautifulSoup(result['content'], _html_parser, parse_only=SoupStrainer(**look_in)).find_all('link')\n for link in all_links:\n rel = link.get('rel', None)[0]\n if rel in endpoint:\n href = link.get('href', None)\n if href:\n url = urlparse(href)\n if url.scheme == '' or url.netloc == '':\n url = urlparse(urljoin(domain, href))\n if url.scheme in ('http', 'https'):\n result[rel].add(url)\n return result\n\ndef discoverMicropubEndpoints(domain, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the micropub for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. 
only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('micropub',), content, look_in, test_urls, validateCerts)\n\ndef discoverTokenEndpoints(domain, content=None, look_in={'name': 'link'}, test_urls=True, validateCerts=True):\n \"\"\"Find the token for the given domain.\n Only scan html element matching all criteria in look_in.\n\n optionally the content to be scanned can be given as an argument.\n\n :param domain: the URL of the domain to handle\n :param content: the content to be scanned for the endpoint\n :param look_in: dictionary with name, id and class_. only element matching all of these will be scanned\n :param test_urls: optional flag to test URLs for validation\n :param validateCerts: optional flag to enforce HTTPS certificates if present\n :rtype: list of endpoints\n \"\"\"\n return discoverEndpoint(domain, ('token_endpoint',), content, look_in, test_urls, validateCerts)\n",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
#!/usr/bin/env python3
# coding=utf-8
import fire
import json
import os
import time
import requests
import hashlib
import random
root_path, file_name = os.path.split(os.path.realpath(__file__))
ip_list_path = ''.join([root_path, os.path.sep, 'ip_list.json'])
class ProxySwift(object):
server_id = '1'
def requerst_get(self, url, data, *p, **kwargs):
SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'
PartnerID = '2017061217350058'
TimeStamp = int(time.time())
source_data = {
'partner_id': PartnerID,
'timestamp': TimeStamp
}
source_data.update(data)
tmp_data = [i for i in source_data.items()]
tmp_data = sorted(tmp_data, key=lambda i: i[0])
url_list = ['{}{}'.format(*i) for i in tmp_data]
        sign = ''.join(url_list)
data = sign + SecretKey
md_5 = hashlib.md5()
md_5.update(data.encode("utf-8"))
sign = md_5.hexdigest()
source_data.update({'sign': sign})
        return requests.get(url, *p, params=source_data, verify=False, **kwargs)
def get_ip(self, interface_id='', pool_id=''):
url = 'https://api.proxyswift.com/ip/get'
data = {
'server_id': self.server_id,
'pool_id': pool_id,
'interface_id': interface_id,
}
r = self.requerst_get(url, data)
response = r.json()
return response
def get_task(self, task_id):
url = 'https://api.proxyswift.com/task/get'
data = {'task_id': task_id}
r = self.requerst_get(url, data)
return r.json()
def changes_ip(self, interface_id, filter=24):
url = 'https://api.proxyswift.com/ip/change'
data = {
'server_id': self.server_id,
'interface_id': interface_id,
'filter': filter,
}
r = self.requerst_get(url, data)
task_id = r.json()['taskId']
        # poll the change task until it reports success, then return the refreshed IP info
        i = 1
        while True:
            time.sleep(i % 2 + 1)  # alternate 2s/1s between polls
            i += 1
            status = self.get_task(task_id)['status']
            if status == 'success':
                ip_port = self.get_ip(interface_id)
                return ip_port
class ProxyPool(object):
def __init__(self, proxyswift=ProxySwift(), interval=4):
self.interval = interval
self.ps = proxyswift
self.count = 0
self.index = 0
with open(ip_list_path, 'r', encoding='utf-8') as f:
self.pool = json.loads(f.read())
def get(self):
        # randomly pick an IP from the pool
with open(ip_list_path, 'r', encoding='utf-8') as f:
self.pool = json.loads(f.read())
ip = random.choice(self.pool)
ip = "{0}:{1}".format(ip['ip'], ip['port'])
print(ip)
return ip
def change_ip(self, proxy_server):
for ip in self.pool:
if proxy_server == "http://%(ip)s:%(port)s" % ip:
self.pool.pop(0)
self.ps.changes_ip(ip['id'])
self.pool = self.ps.get_ip()
time.sleep(1)
break
self.refresh_ip()
def refresh_ip(self):
time.sleep(5)
self.pool = self.ps.get_ip()
print(self.pool)
# os.environ['ip_list'] = json.dumps(self.ps.get_ip())
with open(ip_list_path, 'w', encoding='utf-8') as f:
f.write(json.dumps(self.ps.get_ip()))
def main():
fire.Fire(ProxyPool)
if __name__ == '__main__':
main()
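# Usage note (illustrative): python-fire exposes ProxyPool's methods as CLI commands,
# so assuming this file is saved as proxy_pool.py one could run, e.g.
#   python proxy_pool.py get
#   python proxy_pool.py refresh_ip
#   python proxy_pool.py change_ip --proxy_server "http://1.2.3.4:8080"
# ip_list.json must already exist next to the script (ProxyPool reads it in __init__),
# and the ProxySwift credentials above must be valid for change_ip/refresh_ip to work.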
|
normal
|
{
"blob_id": "0ff96b2314927d7b3e763242e554fd561f3c9343",
"index": 5872,
"step-1": "<mask token>\n\n\nclass ProxySwift(object):\n <mask token>\n\n def requerst_get(self, url, data, *p, **kwargs):\n SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'\n PartnerID = '2017061217350058'\n TimeStamp = int(time.time())\n source_data = {'partner_id': PartnerID, 'timestamp': TimeStamp}\n source_data.update(data)\n tmp_data = [i for i in source_data.items()]\n tmp_data = sorted(tmp_data, key=lambda i: i[0])\n url_list = ['{}{}'.format(*i) for i in tmp_data]\n sign = ''.join(url_list)\n data = sign + SecretKey\n md_5 = hashlib.md5()\n md_5.update(data.encode('utf-8'))\n sign = md_5.hexdigest()\n source_data.update({'sign': sign})\n return requests.get(url, *p, params=source_data, verify=False, **kwargs\n )\n\n def get_ip(self, interface_id='', pool_id=''):\n url = 'https://api.proxyswift.com/ip/get'\n data = {'server_id': self.server_id, 'pool_id': pool_id,\n 'interface_id': interface_id}\n r = self.requerst_get(url, data)\n response = r.json()\n return response\n <mask token>\n\n def changes_ip(self, interface_id, filter=24):\n url = 'https://api.proxyswift.com/ip/change'\n data = {'server_id': self.server_id, 'interface_id': interface_id,\n 'filter': filter}\n r = self.requerst_get(url, data)\n task_id = r.json()['taskId']\n i = 1\n while True:\n time.sleep(i % 2 + 1)\n status = self.get_task(task_id)['status']\n if status == 'success':\n ip_port = self.get_ip(interface_id)\n return ip_port\n\n\nclass ProxyPool(object):\n\n def __init__(self, proxyswift=ProxySwift(), interval=4):\n self.interval = interval\n self.ps = proxyswift\n self.count = 0\n self.index = 0\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n\n def get(self):\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n ip = random.choice(self.pool)\n ip = '{0}:{1}'.format(ip['ip'], ip['port'])\n print(ip)\n return ip\n\n def change_ip(self, proxy_server):\n for ip in self.pool:\n if proxy_server == 'http://%(ip)s:%(port)s' % ip:\n self.pool.pop(0)\n self.ps.changes_ip(ip['id'])\n self.pool = self.ps.get_ip()\n time.sleep(1)\n break\n self.refresh_ip()\n\n def refresh_ip(self):\n time.sleep(5)\n self.pool = self.ps.get_ip()\n print(self.pool)\n with open(ip_list_path, 'w', encoding='utf-8') as f:\n f.write(json.dumps(self.ps.get_ip()))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ProxySwift(object):\n <mask token>\n\n def requerst_get(self, url, data, *p, **kwargs):\n SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'\n PartnerID = '2017061217350058'\n TimeStamp = int(time.time())\n source_data = {'partner_id': PartnerID, 'timestamp': TimeStamp}\n source_data.update(data)\n tmp_data = [i for i in source_data.items()]\n tmp_data = sorted(tmp_data, key=lambda i: i[0])\n url_list = ['{}{}'.format(*i) for i in tmp_data]\n sign = ''.join(url_list)\n data = sign + SecretKey\n md_5 = hashlib.md5()\n md_5.update(data.encode('utf-8'))\n sign = md_5.hexdigest()\n source_data.update({'sign': sign})\n return requests.get(url, *p, params=source_data, verify=False, **kwargs\n )\n\n def get_ip(self, interface_id='', pool_id=''):\n url = 'https://api.proxyswift.com/ip/get'\n data = {'server_id': self.server_id, 'pool_id': pool_id,\n 'interface_id': interface_id}\n r = self.requerst_get(url, data)\n response = r.json()\n return response\n\n def get_task(self, task_id):\n url = 'https://api.proxyswift.com/task/get'\n data = {'task_id': task_id}\n r = self.requerst_get(url, data)\n return r.json()\n\n def changes_ip(self, interface_id, filter=24):\n url = 'https://api.proxyswift.com/ip/change'\n data = {'server_id': self.server_id, 'interface_id': interface_id,\n 'filter': filter}\n r = self.requerst_get(url, data)\n task_id = r.json()['taskId']\n i = 1\n while True:\n time.sleep(i % 2 + 1)\n status = self.get_task(task_id)['status']\n if status == 'success':\n ip_port = self.get_ip(interface_id)\n return ip_port\n\n\nclass ProxyPool(object):\n\n def __init__(self, proxyswift=ProxySwift(), interval=4):\n self.interval = interval\n self.ps = proxyswift\n self.count = 0\n self.index = 0\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n\n def get(self):\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n ip = random.choice(self.pool)\n ip = '{0}:{1}'.format(ip['ip'], ip['port'])\n print(ip)\n return ip\n\n def change_ip(self, proxy_server):\n for ip in self.pool:\n if proxy_server == 'http://%(ip)s:%(port)s' % ip:\n self.pool.pop(0)\n self.ps.changes_ip(ip['id'])\n self.pool = self.ps.get_ip()\n time.sleep(1)\n break\n self.refresh_ip()\n\n def refresh_ip(self):\n time.sleep(5)\n self.pool = self.ps.get_ip()\n print(self.pool)\n with open(ip_list_path, 'w', encoding='utf-8') as f:\n f.write(json.dumps(self.ps.get_ip()))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ProxySwift(object):\n server_id = '1'\n\n def requerst_get(self, url, data, *p, **kwargs):\n SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'\n PartnerID = '2017061217350058'\n TimeStamp = int(time.time())\n source_data = {'partner_id': PartnerID, 'timestamp': TimeStamp}\n source_data.update(data)\n tmp_data = [i for i in source_data.items()]\n tmp_data = sorted(tmp_data, key=lambda i: i[0])\n url_list = ['{}{}'.format(*i) for i in tmp_data]\n sign = ''.join(url_list)\n data = sign + SecretKey\n md_5 = hashlib.md5()\n md_5.update(data.encode('utf-8'))\n sign = md_5.hexdigest()\n source_data.update({'sign': sign})\n return requests.get(url, *p, params=source_data, verify=False, **kwargs\n )\n\n def get_ip(self, interface_id='', pool_id=''):\n url = 'https://api.proxyswift.com/ip/get'\n data = {'server_id': self.server_id, 'pool_id': pool_id,\n 'interface_id': interface_id}\n r = self.requerst_get(url, data)\n response = r.json()\n return response\n\n def get_task(self, task_id):\n url = 'https://api.proxyswift.com/task/get'\n data = {'task_id': task_id}\n r = self.requerst_get(url, data)\n return r.json()\n\n def changes_ip(self, interface_id, filter=24):\n url = 'https://api.proxyswift.com/ip/change'\n data = {'server_id': self.server_id, 'interface_id': interface_id,\n 'filter': filter}\n r = self.requerst_get(url, data)\n task_id = r.json()['taskId']\n i = 1\n while True:\n time.sleep(i % 2 + 1)\n status = self.get_task(task_id)['status']\n if status == 'success':\n ip_port = self.get_ip(interface_id)\n return ip_port\n\n\nclass ProxyPool(object):\n\n def __init__(self, proxyswift=ProxySwift(), interval=4):\n self.interval = interval\n self.ps = proxyswift\n self.count = 0\n self.index = 0\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n\n def get(self):\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n ip = random.choice(self.pool)\n ip = '{0}:{1}'.format(ip['ip'], ip['port'])\n print(ip)\n return ip\n\n def change_ip(self, proxy_server):\n for ip in self.pool:\n if proxy_server == 'http://%(ip)s:%(port)s' % ip:\n self.pool.pop(0)\n self.ps.changes_ip(ip['id'])\n self.pool = self.ps.get_ip()\n time.sleep(1)\n break\n self.refresh_ip()\n\n def refresh_ip(self):\n time.sleep(5)\n self.pool = self.ps.get_ip()\n print(self.pool)\n with open(ip_list_path, 'w', encoding='utf-8') as f:\n f.write(json.dumps(self.ps.get_ip()))\n\n\ndef main():\n fire.Fire(ProxyPool)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "<mask token>\nroot_path, file_name = os.path.split(os.path.realpath(__file__))\nip_list_path = ''.join([root_path, os.path.sep, 'ip_list.json'])\n\n\nclass ProxySwift(object):\n server_id = '1'\n\n def requerst_get(self, url, data, *p, **kwargs):\n SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'\n PartnerID = '2017061217350058'\n TimeStamp = int(time.time())\n source_data = {'partner_id': PartnerID, 'timestamp': TimeStamp}\n source_data.update(data)\n tmp_data = [i for i in source_data.items()]\n tmp_data = sorted(tmp_data, key=lambda i: i[0])\n url_list = ['{}{}'.format(*i) for i in tmp_data]\n sign = ''.join(url_list)\n data = sign + SecretKey\n md_5 = hashlib.md5()\n md_5.update(data.encode('utf-8'))\n sign = md_5.hexdigest()\n source_data.update({'sign': sign})\n return requests.get(url, *p, params=source_data, verify=False, **kwargs\n )\n\n def get_ip(self, interface_id='', pool_id=''):\n url = 'https://api.proxyswift.com/ip/get'\n data = {'server_id': self.server_id, 'pool_id': pool_id,\n 'interface_id': interface_id}\n r = self.requerst_get(url, data)\n response = r.json()\n return response\n\n def get_task(self, task_id):\n url = 'https://api.proxyswift.com/task/get'\n data = {'task_id': task_id}\n r = self.requerst_get(url, data)\n return r.json()\n\n def changes_ip(self, interface_id, filter=24):\n url = 'https://api.proxyswift.com/ip/change'\n data = {'server_id': self.server_id, 'interface_id': interface_id,\n 'filter': filter}\n r = self.requerst_get(url, data)\n task_id = r.json()['taskId']\n i = 1\n while True:\n time.sleep(i % 2 + 1)\n status = self.get_task(task_id)['status']\n if status == 'success':\n ip_port = self.get_ip(interface_id)\n return ip_port\n\n\nclass ProxyPool(object):\n\n def __init__(self, proxyswift=ProxySwift(), interval=4):\n self.interval = interval\n self.ps = proxyswift\n self.count = 0\n self.index = 0\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n\n def get(self):\n with open(ip_list_path, 'r', encoding='utf-8') as f:\n self.pool = json.loads(f.read())\n ip = random.choice(self.pool)\n ip = '{0}:{1}'.format(ip['ip'], ip['port'])\n print(ip)\n return ip\n\n def change_ip(self, proxy_server):\n for ip in self.pool:\n if proxy_server == 'http://%(ip)s:%(port)s' % ip:\n self.pool.pop(0)\n self.ps.changes_ip(ip['id'])\n self.pool = self.ps.get_ip()\n time.sleep(1)\n break\n self.refresh_ip()\n\n def refresh_ip(self):\n time.sleep(5)\n self.pool = self.ps.get_ip()\n print(self.pool)\n with open(ip_list_path, 'w', encoding='utf-8') as f:\n f.write(json.dumps(self.ps.get_ip()))\n\n\ndef main():\n fire.Fire(ProxyPool)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/env python3\r\n# coding=utf-8\r\nimport fire\r\nimport json\r\nimport os\r\nimport time\r\nimport requests\r\nimport time\r\nimport hashlib\r\nimport random\r\n\r\nroot_path, file_name = os.path.split(os.path.realpath(__file__))\r\nip_list_path = ''.join([root_path, os.path.sep, 'ip_list.json'])\r\n\r\n\r\nclass ProxySwift(object):\r\n server_id = '1'\r\n\r\n def requerst_get(self, url, data, *p, **kwargs):\r\n SecretKey = '3JCx8fAF7Bpq5Aj4t9wS7cfVB7hpXZ7j'\r\n\r\n PartnerID = '2017061217350058'\r\n TimeStamp = int(time.time())\r\n\r\n\r\n source_data = {\r\n 'partner_id': PartnerID,\r\n 'timestamp': TimeStamp\r\n }\r\n\r\n source_data.update(data)\r\n\r\n tmp_data = [i for i in source_data.items()]\r\n tmp_data = sorted(tmp_data, key=lambda i: i[0])\r\n\r\n url_list = ['{}{}'.format(*i) for i in tmp_data]\r\n # url_list.reverse()\r\n # sign = ''.join(url_list)\r\n # sign = ''.join(sorted(sign))\r\n\r\n sign = ''.join(url_list)\r\n # sign = ''.join(sorted(sign))\r\n\r\n data = sign + SecretKey\r\n md_5 = hashlib.md5()\r\n md_5.update(data.encode(\"utf-8\"))\r\n sign = md_5.hexdigest()\r\n source_data.update({'sign': sign})\r\n return requests.get(url, params=source_data, verify=False, *p, **kwargs)\r\n\r\n def get_ip(self, interface_id='', pool_id=''):\r\n url = 'https://api.proxyswift.com/ip/get'\r\n data = {\r\n 'server_id': self.server_id,\r\n 'pool_id': pool_id,\r\n 'interface_id': interface_id,\r\n }\r\n r = self.requerst_get(url, data)\r\n response = r.json()\r\n return response\r\n\r\n def get_task(self, task_id):\r\n url = 'https://api.proxyswift.com/task/get'\r\n data = {'task_id': task_id}\r\n r = self.requerst_get(url, data)\r\n\r\n return r.json()\r\n\r\n def changes_ip(self, interface_id, filter=24):\r\n url = 'https://api.proxyswift.com/ip/change'\r\n data = {\r\n 'server_id': self.server_id,\r\n 'interface_id': interface_id,\r\n 'filter': filter,\r\n }\r\n\r\n r = self.requerst_get(url, data)\r\n task_id = r.json()['taskId']\r\n #status = self(task_id)['status']\r\n\r\n i = 1\r\n while True:\r\n time.sleep(i%2+1)\r\n status = self.get_task(task_id)['status']\r\n if status == 'success':\r\n ip_port = self.get_ip(interface_id)\r\n return ip_port\r\n\r\n\r\nclass ProxyPool(object):\r\n def __init__(self, proxyswift=ProxySwift(), interval=4):\r\n\r\n self.interval = interval\r\n self.ps = proxyswift\r\n self.count = 0\r\n self.index = 0\r\n\r\n with open(ip_list_path, 'r', encoding='utf-8') as f:\r\n self.pool = json.loads(f.read())\r\n\r\n def get(self):\r\n # 从 pool中随机取一个ip\r\n with open(ip_list_path, 'r', encoding='utf-8') as f:\r\n self.pool = json.loads(f.read())\r\n ip = random.choice(self.pool)\r\n ip = \"{0}:{1}\".format(ip['ip'], ip['port'])\r\n print(ip)\r\n return ip\r\n\r\n def change_ip(self, proxy_server):\r\n for ip in self.pool:\r\n if proxy_server == \"http://%(ip)s:%(port)s\" % ip:\r\n self.pool.pop(0)\r\n self.ps.changes_ip(ip['id'])\r\n self.pool = self.ps.get_ip()\r\n time.sleep(1)\r\n break\r\n self.refresh_ip()\r\n\r\n def refresh_ip(self):\r\n time.sleep(5)\r\n self.pool = self.ps.get_ip()\r\n print(self.pool)\r\n # os.environ['ip_list'] = json.dumps(self.ps.get_ip())\r\n with open(ip_list_path, 'w', encoding='utf-8') as f:\r\n f.write(json.dumps(self.ps.get_ip()))\r\n\r\n\r\ndef main():\r\n fire.Fire(ProxyPool)\r\n\r\nif __name__ == '__main__':\r\n main()",
"step-ids": [
9,
10,
13,
14,
16
]
}
|
[
9,
10,
13,
14,
16
] |
from models import Sensor
import mysql.connector as mariadb
## CREATE A DB WITH MARIADB ##
mariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')
cursor = mariadb_connection.cursor()
def closeConnection():
cursor.close()
mariadb_connection.close()
return
def getTasks(amount):
mariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')
cursor = mariadb_connection.cursor()
all_data = []
cursor.execute("SELECT * FROM Sensor")
all_entries = cursor.fetchall()
for row in all_entries:
entry = Sensor(row[0], row[1], row[2])
all_data.append(entry.data)
closeConnection()
return all_data
def getTask(task_id):
mariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')
cursor = mariadb_connection.cursor()
cursor.execute("SELECT * FROM Sensor WHERE ID={}".format(task_id))
entry = cursor.fetchall()
data = Sensor(entry[0][0], entry[0][1], entry[0][2])
closeConnection()
return data.data
|
normal
|
{
"blob_id": "f471062573a5ec8cfeb194168edfba3d2700cac6",
"index": 9845,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef closeConnection():\n cursor.close()\n mariadb_connection.close()\n return\n\n\ndef getTasks(amount):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n all_data = []\n cursor.execute('SELECT * FROM Sensor')\n all_entries = cursor.fetchall()\n for row in all_entries:\n entry = Sensor(row[0], row[1], row[2])\n all_data.append(entry.data)\n closeConnection()\n return all_data\n\n\ndef getTask(task_id):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n cursor.execute('SELECT * FROM Sensor WHERE ID={}'.format(task_id))\n entry = cursor.fetchall()\n data = Sensor(entry[0][0], entry[0][1], entry[0][2])\n closeConnection()\n return data.data\n",
"step-3": "<mask token>\nmariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\ncursor = mariadb_connection.cursor()\n\n\ndef closeConnection():\n cursor.close()\n mariadb_connection.close()\n return\n\n\ndef getTasks(amount):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n all_data = []\n cursor.execute('SELECT * FROM Sensor')\n all_entries = cursor.fetchall()\n for row in all_entries:\n entry = Sensor(row[0], row[1], row[2])\n all_data.append(entry.data)\n closeConnection()\n return all_data\n\n\ndef getTask(task_id):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n cursor.execute('SELECT * FROM Sensor WHERE ID={}'.format(task_id))\n entry = cursor.fetchall()\n data = Sensor(entry[0][0], entry[0][1], entry[0][2])\n closeConnection()\n return data.data\n",
"step-4": "from models import Sensor\nimport mysql.connector as mariadb\nmariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\ncursor = mariadb_connection.cursor()\n\n\ndef closeConnection():\n cursor.close()\n mariadb_connection.close()\n return\n\n\ndef getTasks(amount):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n all_data = []\n cursor.execute('SELECT * FROM Sensor')\n all_entries = cursor.fetchall()\n for row in all_entries:\n entry = Sensor(row[0], row[1], row[2])\n all_data.append(entry.data)\n closeConnection()\n return all_data\n\n\ndef getTask(task_id):\n mariadb_connection = mariadb.connect(user='web', password='raspberry',\n database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n cursor.execute('SELECT * FROM Sensor WHERE ID={}'.format(task_id))\n entry = cursor.fetchall()\n data = Sensor(entry[0][0], entry[0][1], entry[0][2])\n closeConnection()\n return data.data\n",
"step-5": "from models import Sensor\nimport mysql.connector as mariadb\n\n## CREATE A DB WITH MARIADB ##\nmariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')\ncursor = mariadb_connection.cursor()\n\ndef closeConnection():\n cursor.close()\n mariadb_connection.close()\n return\n\ndef getTasks(amount):\n mariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n all_data = []\n cursor.execute(\"SELECT * FROM Sensor\")\n all_entries = cursor.fetchall()\n\n for row in all_entries:\n entry = Sensor(row[0], row[1], row[2])\n all_data.append(entry.data)\n\n closeConnection()\n return all_data\n\ndef getTask(task_id):\n mariadb_connection = mariadb.connect(user='web', password='raspberry', database='PlantHubDB')\n cursor = mariadb_connection.cursor()\n cursor.execute(\"SELECT * FROM Sensor WHERE ID={}\".format(task_id))\n entry = cursor.fetchall()\n\n data = Sensor(entry[0][0], entry[0][1], entry[0][2])\n\n closeConnection()\n return data.data\n ",
"step-ids": [
0,
3,
4,
5,
6
]
}
|
[
0,
3,
4,
5,
6
] |
from web3 import Web3, HTTPProvider, IPCProvider
from tcmb.tcmb_parser import TCMB_Processor
from ecb.ecb_parser import ECB_Processor
from web3.contract import ConciseContract
from web3.middleware import geth_poa_middleware
import json
import time
tcmb_currencies = ["TRY", "USD", "AUD", "DKK", "EUR", "GBP", "CHF", "SEK", "CAD",
"KWD", "NOK", "SAR", "JPY", "BGN", "RON", "RUB", "IRR", "CNY", "PKR"]
ecb_currencies = ["EUR", "USD", "JPY", "BGN", "CZK", "DKK", "GBP", "HUF", "PLN",
"RON", "SEK", "CHF", "ISK", "NOK", "HRK", "RUB", "TRY", "AUD", "BRL",
"CAD", "CNY", "HKD", "IDR", "ILS", "INR", "KRW", "MXN", "MYR", "NZD",
"PHP", "SGD", "THB", "ZAR"]
def epoch_day(epoch_time):
epoch_time = int(epoch_time)
return(epoch_time - (epoch_time % 86400))
with open('config_ebloc.json') as json_data_file:
config_data = json.load(json_data_file)
owner_address = config_data["owner"]["address"]
owner_password = config_data["owner"]["password"]
contract_address = config_data["contract"]["address"]
contract_abi = config_data["contract"]["abi"]
gas = int(config_data["price"]["gas"])
gas_price = Web3.toWei( int(config_data["price"]["gas_price"]), 'gwei')
ecb_daily_log_path = config_data["log"]["ecb_daily"]
tcmb_daily_log_path = config_data["log"]["tcmb_daily"]
geth_ipc_path = config_data["geth"]["geth_ipc_path"]
contract_address = Web3.toChecksumAddress(contract_address)
web3 = Web3(IPCProvider(geth_ipc_path))
web3.middleware_stack.inject(geth_poa_middleware, layer=0)
web3.eth.defaultAccount = web3.eth.accounts[0]
web3.personal.unlockAccount(web3.eth.accounts[0], owner_password)
contract_instance = web3.eth.contract(abi=contract_abi, address=contract_address, ContractFactoryClass=ConciseContract)
unix_time = Web3.toInt(epoch_day(time.time()))
def add_ecb():
unix_time = Web3.toInt(epoch_day(time.time()))
ECB = ECB_Processor()
f = open(ecb_daily_log_path, "a")
if(time.strftime("%Y-%m-%d") == ECB.Currency_Dict["time"]):
for curr in ecb_currencies:
curr_code = bytes(curr, encoding='utf-8')
curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr])*(10**9)))
tx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]})
tx_hash = tx_hash.hex()
print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash, curr_code, file=f)
else:
print(time.strftime("%Y-%m-%d %H:%M"), unix_time, "Weekend", file=f)
f.close()
def add_tcmb():
unix_time = Web3.toInt(epoch_day(time.time()))
TCMB = TCMB_Processor()
f = open(tcmb_daily_log_path, "a")
if(time.strftime("%m/%d/%Y") == TCMB.CURRENCY_DICT["Date"]):
for curr in tcmb_currencies:
curr_code = bytes(curr, encoding='utf-8')
curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr]["ForexBuying"])*(10**9)))
curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr]["ForexSelling"])*(10**9)))
# forex buying
tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth.accounts[0]})
tx_hash_fb = tx_hash_fb.hex()
print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash_fb, curr_code, file=f)
# forex selling
tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, curr_value_fs, transact={'from': web3.eth.accounts[0]})
tx_hash_fs = tx_hash_fs.hex()
print(time.strftime("%Y-%m-%d %H:%M"), unix_time, tx_hash_fs, curr_code, file=f)
else:
print(time.strftime("%Y-%m-%d %H:%M"), unix_time, "Weekend", file=f)
f.close()
if __name__ == "__main__":
add_ecb()
add_tcmb()
print(time.strftime("%Y-%m-%d %H:%M"), " DONE EBLOC add_ecb & add_tcmb")
|
normal
|
{
"blob_id": "ecd5097d9d497b62b89217ee3c46506f21fc15d2",
"index": 5065,
"step-1": "<mask token>\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\n<mask token>\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\n<mask token>\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\n<mask token>\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\n<mask token>\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n",
"step-3": "<mask token>\ntcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK',\n 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR'\n ]\necb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF',\n 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD',\n 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR',\n 'NZD', 'PHP', 'SGD', 'THB', 'ZAR']\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\nowner_address = config_data['owner']['address']\nowner_password = config_data['owner']['password']\ncontract_address = config_data['contract']['address']\ncontract_abi = config_data['contract']['abi']\ngas = int(config_data['price']['gas'])\ngas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei')\necb_daily_log_path = config_data['log']['ecb_daily']\ntcmb_daily_log_path = config_data['log']['tcmb_daily']\ngeth_ipc_path = config_data['geth']['geth_ipc_path']\ncontract_address = Web3.toChecksumAddress(contract_address)\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\ncontract_instance = web3.eth.contract(abi=contract_abi, address=\n contract_address, ContractFactoryClass=ConciseContract)\nunix_time = Web3.toInt(epoch_day(time.time()))\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n",
"step-4": "from web3 import Web3, HTTPProvider, IPCProvider\nfrom tcmb.tcmb_parser import TCMB_Processor\nfrom ecb.ecb_parser import ECB_Processor\nfrom web3.contract import ConciseContract\nfrom web3.middleware import geth_poa_middleware\nimport json\nimport time\ntcmb_currencies = ['TRY', 'USD', 'AUD', 'DKK', 'EUR', 'GBP', 'CHF', 'SEK',\n 'CAD', 'KWD', 'NOK', 'SAR', 'JPY', 'BGN', 'RON', 'RUB', 'IRR', 'CNY', 'PKR'\n ]\necb_currencies = ['EUR', 'USD', 'JPY', 'BGN', 'CZK', 'DKK', 'GBP', 'HUF',\n 'PLN', 'RON', 'SEK', 'CHF', 'ISK', 'NOK', 'HRK', 'RUB', 'TRY', 'AUD',\n 'BRL', 'CAD', 'CNY', 'HKD', 'IDR', 'ILS', 'INR', 'KRW', 'MXN', 'MYR',\n 'NZD', 'PHP', 'SGD', 'THB', 'ZAR']\n\n\ndef epoch_day(epoch_time):\n epoch_time = int(epoch_time)\n return epoch_time - epoch_time % 86400\n\n\nwith open('config_ebloc.json') as json_data_file:\n config_data = json.load(json_data_file)\nowner_address = config_data['owner']['address']\nowner_password = config_data['owner']['password']\ncontract_address = config_data['contract']['address']\ncontract_abi = config_data['contract']['abi']\ngas = int(config_data['price']['gas'])\ngas_price = Web3.toWei(int(config_data['price']['gas_price']), 'gwei')\necb_daily_log_path = config_data['log']['ecb_daily']\ntcmb_daily_log_path = config_data['log']['tcmb_daily']\ngeth_ipc_path = config_data['geth']['geth_ipc_path']\ncontract_address = Web3.toChecksumAddress(contract_address)\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\ncontract_instance = web3.eth.contract(abi=contract_abi, address=\n contract_address, ContractFactoryClass=ConciseContract)\nunix_time = Web3.toInt(epoch_day(time.time()))\n\n\ndef add_ecb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n ECB = ECB_Processor()\n f = open(ecb_daily_log_path, 'a')\n if time.strftime('%Y-%m-%d') == ECB.Currency_Dict['time']:\n for curr in ecb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value = web3.toInt(int(float(ECB.Currency_Dict[curr]) * 10 **\n 9))\n tx_hash = contract_instance.add_ecb(unix_time, curr_code,\n curr_value, transact={'from': web3.eth.accounts[0]})\n tx_hash = tx_hash.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\ndef add_tcmb():\n unix_time = Web3.toInt(epoch_day(time.time()))\n TCMB = TCMB_Processor()\n f = open(tcmb_daily_log_path, 'a')\n if time.strftime('%m/%d/%Y') == TCMB.CURRENCY_DICT['Date']:\n for curr in tcmb_currencies:\n curr_code = bytes(curr, encoding='utf-8')\n curr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexBuying']) * 10 ** 9))\n curr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\n 'ForexSelling']) * 10 ** 9))\n tx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time,\n curr_code, curr_value_fb, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fb = tx_hash_fb.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fb,\n curr_code, file=f)\n tx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time,\n curr_code, curr_value_fs, transact={'from': web3.eth.\n accounts[0]})\n tx_hash_fs = tx_hash_fs.hex()\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, tx_hash_fs,\n curr_code, file=f)\n else:\n print(time.strftime('%Y-%m-%d %H:%M'), unix_time, 'Weekend', file=f)\n f.close()\n\n\nif __name__ == '__main__':\n 
add_ecb()\n add_tcmb()\n print(time.strftime('%Y-%m-%d %H:%M'), ' DONE EBLOC add_ecb & add_tcmb')\n",
"step-5": "from web3 import Web3, HTTPProvider, IPCProvider\nfrom tcmb.tcmb_parser import TCMB_Processor\nfrom ecb.ecb_parser import ECB_Processor\nfrom web3.contract import ConciseContract\nfrom web3.middleware import geth_poa_middleware\nimport json\nimport time\n\ntcmb_currencies = [\"TRY\", \"USD\", \"AUD\", \"DKK\", \"EUR\", \"GBP\", \"CHF\", \"SEK\", \"CAD\", \n\t\t\"KWD\", \"NOK\", \"SAR\", \"JPY\", \"BGN\", \"RON\", \"RUB\", \"IRR\", \"CNY\", \"PKR\"]\n\necb_currencies = [\"EUR\", \"USD\", \"JPY\", \"BGN\", \"CZK\", \"DKK\", \"GBP\", \"HUF\", \"PLN\", \n\t\t\"RON\", \"SEK\", \"CHF\", \"ISK\", \"NOK\", \"HRK\", \"RUB\", \"TRY\", \"AUD\", \"BRL\", \n\t\t\"CAD\", \"CNY\", \"HKD\", \"IDR\", \"ILS\", \"INR\", \"KRW\", \"MXN\", \"MYR\", \"NZD\", \n\t\t\"PHP\", \"SGD\", \"THB\", \"ZAR\"]\n\ndef epoch_day(epoch_time):\n\tepoch_time = int(epoch_time)\n\treturn(epoch_time - (epoch_time % 86400))\n\nwith open('config_ebloc.json') as json_data_file:\n\tconfig_data = json.load(json_data_file)\n\nowner_address = config_data[\"owner\"][\"address\"]\nowner_password = config_data[\"owner\"][\"password\"]\ncontract_address = config_data[\"contract\"][\"address\"]\ncontract_abi = config_data[\"contract\"][\"abi\"]\ngas = int(config_data[\"price\"][\"gas\"])\ngas_price = Web3.toWei( int(config_data[\"price\"][\"gas_price\"]), 'gwei')\necb_daily_log_path = config_data[\"log\"][\"ecb_daily\"]\ntcmb_daily_log_path = config_data[\"log\"][\"tcmb_daily\"]\ngeth_ipc_path = config_data[\"geth\"][\"geth_ipc_path\"]\n\ncontract_address = Web3.toChecksumAddress(contract_address)\n\nweb3 = Web3(IPCProvider(geth_ipc_path))\nweb3.middleware_stack.inject(geth_poa_middleware, layer=0)\n\nweb3.eth.defaultAccount = web3.eth.accounts[0]\nweb3.personal.unlockAccount(web3.eth.accounts[0], owner_password)\n\ncontract_instance = web3.eth.contract(abi=contract_abi, address=contract_address, ContractFactoryClass=ConciseContract)\n\nunix_time = Web3.toInt(epoch_day(time.time()))\n\ndef add_ecb():\n\tunix_time = Web3.toInt(epoch_day(time.time()))\n\tECB = ECB_Processor()\n\tf = open(ecb_daily_log_path, \"a\")\n\tif(time.strftime(\"%Y-%m-%d\") == ECB.Currency_Dict[\"time\"]):\n\t\tfor curr in ecb_currencies:\n\t\t\tcurr_code = bytes(curr, encoding='utf-8')\n\t\t\tcurr_value = web3.toInt(int(float(ECB.Currency_Dict[curr])*(10**9)))\n\t\t\ttx_hash = contract_instance.add_ecb(unix_time, curr_code, curr_value, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash = tx_hash.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash, curr_code, file=f)\n\telse:\n\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, \"Weekend\", file=f)\n\tf.close()\n\ndef add_tcmb():\n\tunix_time = Web3.toInt(epoch_day(time.time()))\n\tTCMB = TCMB_Processor()\n\tf = open(tcmb_daily_log_path, \"a\")\n\tif(time.strftime(\"%m/%d/%Y\") == TCMB.CURRENCY_DICT[\"Date\"]):\n\t\tfor curr in tcmb_currencies:\n\t\t\tcurr_code = bytes(curr, encoding='utf-8')\n\t\t\tcurr_value_fb = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\"ForexBuying\"])*(10**9)))\n\t\t\tcurr_value_fs = web3.toInt(int(float(TCMB.CURRENCY_DICT[curr][\"ForexSelling\"])*(10**9)))\n\t\t\t# forex buying\n\t\t\ttx_hash_fb = contract_instance.add_tcmb_forexbuying(unix_time, curr_code, curr_value_fb, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash_fb = tx_hash_fb.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash_fb, curr_code, file=f)\n\t\t\t# forex selling\n\t\t\ttx_hash_fs = contract_instance.add_tcmb_forexselling(unix_time, curr_code, 
curr_value_fs, transact={'from': web3.eth.accounts[0]})\n\t\t\ttx_hash_fs = tx_hash_fs.hex()\n\t\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, tx_hash_fs, curr_code, file=f)\n\telse:\n\t\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), unix_time, \"Weekend\", file=f)\n\tf.close()\n\n\nif __name__ == \"__main__\":\n\tadd_ecb()\n\tadd_tcmb()\n\tprint(time.strftime(\"%Y-%m-%d %H:%M\"), \" DONE EBLOC add_ecb & add_tcmb\")",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
"""GI on fast."""
import logging
from mpf.core.utility_functions import Util
from mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface
class FASTGIString(GIPlatformInterface):
"""A FAST GI string in a WPC machine."""
def __init__(self, number, sender):
"""Initialise GI string.
TODO: Need to implement the enable_relay and control which strings are
dimmable.
"""
self.log = logging.getLogger('FASTGIString.0x' + str(number))
self.number = number
self.send = sender
def off(self):
"""Turn off GI string."""
self.log.debug("Turning Off GI String")
self.send('GI:' + self.number + ',00')
def on(self, brightness=255):
"""Turn on GI string."""
if brightness >= 255:
brightness = 255
self.log.debug("Turning On GI String to brightness %s", brightness)
# self.send('GI:' + self.number + ',' + Util.int_to_hex_string(brightness))
self.send('GI:{},{}'.format(self.number,
Util.int_to_hex_string(brightness)))
|
normal
|
{
"blob_id": "91cf6d08be2ad86c08de4dd48b2f35dedc55b4bb",
"index": 2177,
"step-1": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n <mask token>\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n <mask token>\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-2": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n <mask token>\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-3": "<mask token>\n\n\nclass FASTGIString(GIPlatformInterface):\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-4": "<mask token>\nimport logging\nfrom mpf.core.utility_functions import Util\nfrom mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface\n\n\nclass FASTGIString(GIPlatformInterface):\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug('Turning Off GI String')\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n self.log.debug('Turning On GI String to brightness %s', brightness)\n self.send('GI:{},{}'.format(self.number, Util.int_to_hex_string(\n brightness)))\n",
"step-5": "\"\"\"GI on fast.\"\"\"\nimport logging\n\nfrom mpf.core.utility_functions import Util\nfrom mpf.platforms.interfaces.gi_platform_interface import GIPlatformInterface\n\n\nclass FASTGIString(GIPlatformInterface):\n\n \"\"\"A FAST GI string in a WPC machine.\"\"\"\n\n def __init__(self, number, sender):\n \"\"\"Initialise GI string.\n\n TODO: Need to implement the enable_relay and control which strings are\n dimmable.\n \"\"\"\n self.log = logging.getLogger('FASTGIString.0x' + str(number))\n self.number = number\n self.send = sender\n\n def off(self):\n \"\"\"Turn off GI string.\"\"\"\n self.log.debug(\"Turning Off GI String\")\n self.send('GI:' + self.number + ',00')\n\n def on(self, brightness=255):\n \"\"\"Turn on GI string.\"\"\"\n if brightness >= 255:\n brightness = 255\n\n self.log.debug(\"Turning On GI String to brightness %s\", brightness)\n # self.send('GI:' + self.number + ',' + Util.int_to_hex_string(brightness))\n\n self.send('GI:{},{}'.format(self.number,\n Util.int_to_hex_string(brightness)))\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import cv2
print(cv2.__version__)
image = cv2.imread("download.jpeg", 1)
print(image)
print(image.shape)
print(image[0])
print("~~~~~~~~~~~~~~~")
print(image.shape[0])
print("~~~~~~~~~~~~~~~")
print(len(image))
|
normal
|
{
"blob_id": "0b0ae6101fd80bdbcf37b935268f3e49230599fb",
"index": 5715,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(cv2.__version__)\n<mask token>\nprint(image)\nprint(image.shape)\nprint(image[0])\nprint('~~~~~~~~~~~~~~~')\nprint(image.shape[0])\nprint('~~~~~~~~~~~~~~~')\nprint(len(image))\n",
"step-3": "<mask token>\nprint(cv2.__version__)\nimage = cv2.imread('download.jpeg', 1)\nprint(image)\nprint(image.shape)\nprint(image[0])\nprint('~~~~~~~~~~~~~~~')\nprint(image.shape[0])\nprint('~~~~~~~~~~~~~~~')\nprint(len(image))\n",
"step-4": "import cv2\nprint(cv2.__version__)\nimage = cv2.imread('download.jpeg', 1)\nprint(image)\nprint(image.shape)\nprint(image[0])\nprint('~~~~~~~~~~~~~~~')\nprint(image.shape[0])\nprint('~~~~~~~~~~~~~~~')\nprint(len(image))\n",
"step-5": "import cv2\nprint(cv2.__version__)\n\nimage = cv2.imread(\"download.jpeg\", 1)\nprint(image)\nprint(image.shape)\n\nprint(image[0])\nprint(\"~~~~~~~~~~~~~~~\")\nprint(image.shape[0])\nprint(\"~~~~~~~~~~~~~~~\")\nprint(len(image))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import json
subjects = []
with open("sub.json", 'r') as subject_file:
subjects = json.load(subject_file)
print(json.dumps(subjects, separators=(',',':')))
|
normal
|
{
"blob_id": "98bd4eb25a76fb9184f9abfcb920a6fbe46b9394",
"index": 631,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-3": "<mask token>\nsubjects = []\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-4": "import json\nsubjects = []\nwith open('sub.json', 'r') as subject_file:\n subjects = json.load(subject_file)\nprint(json.dumps(subjects, separators=(',', ':')))\n",
"step-5": "import json\n\nsubjects = []\n\nwith open(\"sub.json\", 'r') as subject_file:\n\tsubjects = json.load(subject_file)\n\nprint(json.dumps(subjects, separators=(',',':')))\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
__title__ = 'FUCKTHEINTRUDERS'
__description__ = 'Checking for Intruders in my locality'
__version__ = '0.0.1'
__author__ = 'Shivam Jalotra'
__email__ = '[email protected]'
__license__ = 'MIT 1.0'
|
normal
|
{
"blob_id": "ba94a69ac356969ab593afc922a2517f4713771f",
"index": 5536,
"step-1": "<mask token>\n",
"step-2": "__title__ = 'FUCKTHEINTRUDERS'\n__description__ = 'Checking for Intruders in my locality'\n__version__ = '0.0.1'\n__author__ = 'Shivam Jalotra'\n__email__ = '[email protected]'\n__license__ = 'MIT 1.0'\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import items
import grupo
class Conexion:
def __init__(self, direccion, destino):
self.set_direccion(direccion)
self.set_destino(destino)
def __repr__(self):
return str(self.direccion()) + ' => ' + str(self.destino())
def direccion(self):
return self._direccion
def set_direccion(self, direccion):
self._direccion = direccion
def destino(self):
return self._destino
def set_destino(self, destino):
self._destino = destino
class GrupoConexiones(grupo.Grupo):
def conexiones(self):
return self.coleccion()
def conecta_al(self, direccion):
for conexion in self.conexiones():
if conexion.direccion() == direccion:
return conexion.destino()
return localidad_nula
class Localidad:
def __init__(self, nombre, descripcion, conexiones=None, contiene=None):
self.set_nombre(nombre)
self.set_descripcion(descripcion)
self._conexiones = GrupoConexiones(conexiones)
self._grupo_items = items.GrupoItems(contiene)
def __repr__(self):
return self.nombre()
def nombre(self):
return self._nombre
def set_nombre(self, nombre):
self._nombre = nombre
def descripcion(self):
return self._descripcion
def set_descripcion(self, descripcion):
self._descripcion = descripcion
def conexiones(self):
return self._conexiones
def items(self):
return self._grupo_items
def describir(self):
print(self.nombre())
print(self.descripcion())
if not self.items().esta_vacio():
print('También puedes ver:')
for item in self.items():
print('-', item.nombre())
def conecta_con(self, iterable):
self.conexiones().meter_masivo(iterable)
def conecta_al(self, direccion):
return self.conexiones().conecta_al(direccion)
def meter_conexion(self, conexion):
self.conexiones().meter(conexion)
def contiene_token(self, token):
return self.items().contiene_token(token)
# def meter_item(self, item):
# self._grupo_items.meter(item)
# def sacar_item(self, item):
# self._grupo_items.sacar(item)
# def contiene_item(self, item):
# return self._grupo_items.contiene(item)
# def tiene_items(self):
# return self._grupo_items.esta_vacio()
localidad_nula = Localidad('NULA', 'Localidad nula.')
|
normal
|
{
"blob_id": "f59e61977f7c72ab191aadccbd72d23f831b3a1c",
"index": 7050,
"step-1": "<mask token>\n\n\nclass Conexion:\n\n def __init__(self, direccion, destino):\n self.set_direccion(direccion)\n self.set_destino(destino)\n <mask token>\n <mask token>\n\n def set_direccion(self, direccion):\n self._direccion = direccion\n <mask token>\n <mask token>\n\n\nclass GrupoConexiones(grupo.Grupo):\n\n def conexiones(self):\n return self.coleccion()\n\n def conecta_al(self, direccion):\n for conexion in self.conexiones():\n if conexion.direccion() == direccion:\n return conexion.destino()\n return localidad_nula\n\n\nclass Localidad:\n\n def __init__(self, nombre, descripcion, conexiones=None, contiene=None):\n self.set_nombre(nombre)\n self.set_descripcion(descripcion)\n self._conexiones = GrupoConexiones(conexiones)\n self._grupo_items = items.GrupoItems(contiene)\n\n def __repr__(self):\n return self.nombre()\n\n def nombre(self):\n return self._nombre\n\n def set_nombre(self, nombre):\n self._nombre = nombre\n\n def descripcion(self):\n return self._descripcion\n\n def set_descripcion(self, descripcion):\n self._descripcion = descripcion\n\n def conexiones(self):\n return self._conexiones\n\n def items(self):\n return self._grupo_items\n\n def describir(self):\n print(self.nombre())\n print(self.descripcion())\n if not self.items().esta_vacio():\n print('También puedes ver:')\n for item in self.items():\n print('-', item.nombre())\n\n def conecta_con(self, iterable):\n self.conexiones().meter_masivo(iterable)\n\n def conecta_al(self, direccion):\n return self.conexiones().conecta_al(direccion)\n\n def meter_conexion(self, conexion):\n self.conexiones().meter(conexion)\n\n def contiene_token(self, token):\n return self.items().contiene_token(token)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Conexion:\n\n def __init__(self, direccion, destino):\n self.set_direccion(direccion)\n self.set_destino(destino)\n <mask token>\n\n def direccion(self):\n return self._direccion\n\n def set_direccion(self, direccion):\n self._direccion = direccion\n <mask token>\n\n def set_destino(self, destino):\n self._destino = destino\n\n\nclass GrupoConexiones(grupo.Grupo):\n\n def conexiones(self):\n return self.coleccion()\n\n def conecta_al(self, direccion):\n for conexion in self.conexiones():\n if conexion.direccion() == direccion:\n return conexion.destino()\n return localidad_nula\n\n\nclass Localidad:\n\n def __init__(self, nombre, descripcion, conexiones=None, contiene=None):\n self.set_nombre(nombre)\n self.set_descripcion(descripcion)\n self._conexiones = GrupoConexiones(conexiones)\n self._grupo_items = items.GrupoItems(contiene)\n\n def __repr__(self):\n return self.nombre()\n\n def nombre(self):\n return self._nombre\n\n def set_nombre(self, nombre):\n self._nombre = nombre\n\n def descripcion(self):\n return self._descripcion\n\n def set_descripcion(self, descripcion):\n self._descripcion = descripcion\n\n def conexiones(self):\n return self._conexiones\n\n def items(self):\n return self._grupo_items\n\n def describir(self):\n print(self.nombre())\n print(self.descripcion())\n if not self.items().esta_vacio():\n print('También puedes ver:')\n for item in self.items():\n print('-', item.nombre())\n\n def conecta_con(self, iterable):\n self.conexiones().meter_masivo(iterable)\n\n def conecta_al(self, direccion):\n return self.conexiones().conecta_al(direccion)\n\n def meter_conexion(self, conexion):\n self.conexiones().meter(conexion)\n\n def contiene_token(self, token):\n return self.items().contiene_token(token)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Conexion:\n\n def __init__(self, direccion, destino):\n self.set_direccion(direccion)\n self.set_destino(destino)\n\n def __repr__(self):\n return str(self.direccion()) + ' => ' + str(self.destino())\n\n def direccion(self):\n return self._direccion\n\n def set_direccion(self, direccion):\n self._direccion = direccion\n\n def destino(self):\n return self._destino\n\n def set_destino(self, destino):\n self._destino = destino\n\n\nclass GrupoConexiones(grupo.Grupo):\n\n def conexiones(self):\n return self.coleccion()\n\n def conecta_al(self, direccion):\n for conexion in self.conexiones():\n if conexion.direccion() == direccion:\n return conexion.destino()\n return localidad_nula\n\n\nclass Localidad:\n\n def __init__(self, nombre, descripcion, conexiones=None, contiene=None):\n self.set_nombre(nombre)\n self.set_descripcion(descripcion)\n self._conexiones = GrupoConexiones(conexiones)\n self._grupo_items = items.GrupoItems(contiene)\n\n def __repr__(self):\n return self.nombre()\n\n def nombre(self):\n return self._nombre\n\n def set_nombre(self, nombre):\n self._nombre = nombre\n\n def descripcion(self):\n return self._descripcion\n\n def set_descripcion(self, descripcion):\n self._descripcion = descripcion\n\n def conexiones(self):\n return self._conexiones\n\n def items(self):\n return self._grupo_items\n\n def describir(self):\n print(self.nombre())\n print(self.descripcion())\n if not self.items().esta_vacio():\n print('También puedes ver:')\n for item in self.items():\n print('-', item.nombre())\n\n def conecta_con(self, iterable):\n self.conexiones().meter_masivo(iterable)\n\n def conecta_al(self, direccion):\n return self.conexiones().conecta_al(direccion)\n\n def meter_conexion(self, conexion):\n self.conexiones().meter(conexion)\n\n def contiene_token(self, token):\n return self.items().contiene_token(token)\n\n\nlocalidad_nula = Localidad('NULA', 'Localidad nula.')\n",
"step-4": "import items\nimport grupo\n\n\nclass Conexion:\n\n def __init__(self, direccion, destino):\n self.set_direccion(direccion)\n self.set_destino(destino)\n\n def __repr__(self):\n return str(self.direccion()) + ' => ' + str(self.destino())\n\n def direccion(self):\n return self._direccion\n\n def set_direccion(self, direccion):\n self._direccion = direccion\n\n def destino(self):\n return self._destino\n\n def set_destino(self, destino):\n self._destino = destino\n\n\nclass GrupoConexiones(grupo.Grupo):\n\n def conexiones(self):\n return self.coleccion()\n\n def conecta_al(self, direccion):\n for conexion in self.conexiones():\n if conexion.direccion() == direccion:\n return conexion.destino()\n return localidad_nula\n\n\nclass Localidad:\n\n def __init__(self, nombre, descripcion, conexiones=None, contiene=None):\n self.set_nombre(nombre)\n self.set_descripcion(descripcion)\n self._conexiones = GrupoConexiones(conexiones)\n self._grupo_items = items.GrupoItems(contiene)\n\n def __repr__(self):\n return self.nombre()\n\n def nombre(self):\n return self._nombre\n\n def set_nombre(self, nombre):\n self._nombre = nombre\n\n def descripcion(self):\n return self._descripcion\n\n def set_descripcion(self, descripcion):\n self._descripcion = descripcion\n\n def conexiones(self):\n return self._conexiones\n\n def items(self):\n return self._grupo_items\n\n def describir(self):\n print(self.nombre())\n print(self.descripcion())\n if not self.items().esta_vacio():\n print('También puedes ver:')\n for item in self.items():\n print('-', item.nombre())\n\n def conecta_con(self, iterable):\n self.conexiones().meter_masivo(iterable)\n\n def conecta_al(self, direccion):\n return self.conexiones().conecta_al(direccion)\n\n def meter_conexion(self, conexion):\n self.conexiones().meter(conexion)\n\n def contiene_token(self, token):\n return self.items().contiene_token(token)\n\n\nlocalidad_nula = Localidad('NULA', 'Localidad nula.')\n",
"step-5": "import items\nimport grupo\n\nclass Conexion:\n def __init__(self, direccion, destino):\n self.set_direccion(direccion)\n self.set_destino(destino)\n\n def __repr__(self):\n return str(self.direccion()) + ' => ' + str(self.destino())\n\n def direccion(self):\n return self._direccion\n\n def set_direccion(self, direccion):\n self._direccion = direccion\n\n def destino(self):\n return self._destino\n\n def set_destino(self, destino):\n self._destino = destino\n\nclass GrupoConexiones(grupo.Grupo):\n def conexiones(self):\n return self.coleccion()\n\n def conecta_al(self, direccion):\n for conexion in self.conexiones():\n if conexion.direccion() == direccion:\n return conexion.destino()\n return localidad_nula\n\nclass Localidad:\n def __init__(self, nombre, descripcion, conexiones=None, contiene=None):\n self.set_nombre(nombre)\n self.set_descripcion(descripcion)\n self._conexiones = GrupoConexiones(conexiones)\n self._grupo_items = items.GrupoItems(contiene)\n\n def __repr__(self):\n return self.nombre()\n\n def nombre(self):\n return self._nombre\n\n def set_nombre(self, nombre):\n self._nombre = nombre\n\n def descripcion(self):\n return self._descripcion\n\n def set_descripcion(self, descripcion):\n self._descripcion = descripcion\n\n def conexiones(self):\n return self._conexiones\n\n def items(self):\n return self._grupo_items\n\n def describir(self):\n print(self.nombre())\n print(self.descripcion())\n if not self.items().esta_vacio():\n print('También puedes ver:')\n for item in self.items():\n print('-', item.nombre())\n\n def conecta_con(self, iterable):\n self.conexiones().meter_masivo(iterable)\n\n def conecta_al(self, direccion):\n return self.conexiones().conecta_al(direccion)\n\n def meter_conexion(self, conexion):\n self.conexiones().meter(conexion)\n\n def contiene_token(self, token):\n return self.items().contiene_token(token)\n\n # def meter_item(self, item):\n # self._grupo_items.meter(item)\n\n # def sacar_item(self, item):\n # self._grupo_items.sacar(item)\n\n # def contiene_item(self, item):\n # return self._grupo_items.contiene(item)\n\n # def tiene_items(self):\n # return self._grupo_items.esta_vacio()\n\nlocalidad_nula = Localidad('NULA', 'Localidad nula.')\n",
"step-ids": [
20,
22,
25,
26,
27
]
}
|
[
20,
22,
25,
26,
27
] |
# VGGNet
import numpy as np
np.random.seed(317)
from glob import glob
from itertools import cycle
from keras.applications.vgg19 import VGG19
from keras.optimizers import Adam
from keras.models import Model
from keras.layers import Input, BatchNormalization, Flatten, Dropout, Dense
from keras.utils import plot_model
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, CSVLogger, EarlyStopping, Callback
from keras.losses import kullback_leibler_divergence
from math import ceil
from os import path, mkdir, listdir
from skimage.transform import resize
from scipy.misc import imread, imsave
from time import time
import argparse
import logging
import keras.backend as K
import pandas as pd
import tifffile as tif
import sys
sys.path.append('.')
from planet.utils.data_utils import tagset_to_ints, random_transforms
from planet.utils.keras_utils import HistoryPlot
from planet.utils.runtime import funcname
class VGGNet(object):
def __init__(self, checkpoint_name='VGGNet'):
self.config = {
'image_shape': [256, 256, 3],
'input_shape': [224, 224, 3],
'output_shape': [17, ],
'batch_size': 60,
'trn_steps': 680,
'trn_nb_epochs': 200,
'trn_transform': True,
'trn_imgs_csv': 'data/train_v2.csv',
'trn_imgs_dir': 'data/train-jpg',
'tst_imgs_csv': 'data/sample_submission_v2.csv',
'tst_imgs_dir': 'data/test-jpg'
}
self.checkpoint_name = checkpoint_name
self.imgs = []
self.lbls = []
self.net = None
self.rng = np.random
@property
def cpdir(self):
cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str(x) for x in self.config['input_shape']]))
if not path.exists(cpdir):
mkdir(cpdir)
return cpdir
def create_net(self):
x = inputs = Input(shape=self.config['input_shape'])
vgg = VGG19(include_top=False, input_tensor=x)
outputs = Flatten()(vgg.output)
outputs = Dropout(0.1)(outputs)
outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(outputs)
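        # Metrics below operate on rounded (0/1) predictions: true_pos and pred_pos give
        # the fraction of labels that are positive in the ground truth and in the
        # predictions, and F2 is the F-beta score with beta=2 (recall weighted over precision).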
def true_pos(yt, yp):
return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))
def pred_pos(yt, yp):
return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))
def F2(yt, yp):
yt, yp = K.round(yt), K.round(yp)
tp = K.sum(yt * yp)
fp = K.sum(K.clip(yp - yt, 0, 1))
fn = K.sum(K.clip(yt - yp, 0, 1))
p = tp / (tp + fp)
r = tp / (tp + fn)
b = 2.0
return (1 + b**2) * ((p * r) / (b**2 * p + r + K.epsilon()))
self.net = Model(inputs, outputs)
self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',
metrics=['binary_accuracy', F2, true_pos, pred_pos])
self.net.summary()
plot_model(self.net, to_file='%s/net.png' % self.cpdir)
return
def train(self):
batch_gen = self.train_batch_gen(self.config['trn_imgs_csv'], self.config[
'trn_imgs_dir'], self.config['trn_transform'])
cb = [
HistoryPlot('%s/history.png' % self.cpdir),
CSVLogger('%s/history.csv' % self.cpdir),
ModelCheckpoint('%s/loss.weights' % self.cpdir, monitor='loss', verbose=1,
save_best_only=True, mode='min', save_weights_only=True),
ModelCheckpoint('%s/F2.weights' % self.cpdir, monitor='F2',
verbose=1, save_best_only=True, mode='max', save_weights_only=True),
ReduceLROnPlateau(monitor='F2', factor=0.8, patience=2, epsilon=0.005, verbose=1, mode='min'),
EarlyStopping(monitor='F2', min_delta=0.01, patience=10, verbose=1, mode='max')
]
self.net.fit_generator(batch_gen, steps_per_epoch=self.config['trn_steps'], verbose=1, callbacks=cb,
epochs=self.config['trn_nb_epochs'], workers=2, pickle_safe=True)
return
def get_mean_img(self, imgs_paths, mean_img_path):
'''Compute the mean image from the given paths and save it to the given path.'''
logger = logging.getLogger(funcname())
if not path.exists(mean_img_path):
mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)
for idx, img_path in enumerate(imgs_paths):
mean_img += imread(img_path, mode='RGB').astype(np.float32) / len(imgs_paths)
if idx % 1000 == 0:
logger.info('%d/%d' % (idx, len(imgs_paths)))
imsave(mean_img_path, mean_img)
return imread(mean_img_path)
def train_batch_gen(self, imgs_csv, imgs_dir, transform):
logger = logging.getLogger(funcname())
# Read the CSV and extract image names and tags.
df = pd.read_csv(imgs_csv)
imgs_paths = ['%s/%s.jpg' % (imgs_dir, n) for n in df['image_name'].values]
tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]
# Compute the mean image for pre-processing.
mean_img = self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' % self.cpdir)
mean_img = mean_img.astype(np.float32) / 255.
mean_img_mean = np.mean(mean_img)
img_preprocess = lambda img: img.astype(np.float32) / 255. - mean_img_mean
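        # Pre-processing: scale pixels to [0, 1], then subtract the scalar mean of the
        # training-set mean image (a single value rather than a per-pixel mean).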
while True:
imgs_batch = np.zeros([self.config['batch_size'], ] + self.config['input_shape'])
tags_batch = np.zeros([self.config['batch_size'], ] + self.config['output_shape'])
random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)), len(imgs_paths)))
for batch_idx in range(self.config['batch_size']):
data_idx = next(random_idxs)
img = imread(imgs_paths[data_idx], mode='RGB')
img = img_preprocess(img)
img = resize(img, self.config['input_shape'], preserve_range=True, mode='constant')
if transform:
img = random_transforms(img, nb_min=0, nb_max=6)
imgs_batch[batch_idx] = img
tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])
yield imgs_batch, tags_batch
def predict(self, img_batch):
# Get the mean image
imgs_paths = listdir(self.config['trn_imgs_dir'])
mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir
mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.float32) / 255.
mean_img_mean = np.mean(mean_img)
img_preprocess = lambda img: img.astype(np.float32) / 255. - mean_img_mean
for idx in range(len(img_batch)):
img_batch[idx] = img_preprocess(img_batch[idx])
tags_pred = self.net.predict(img_batch)
tags_pred = tags_pred.round().astype(np.uint8)
return tags_pred
if __name__ == "__main__":
from planet.model_runner import model_runner
model = VGGNet()
model_runner(model)
|
normal
|
{
"blob_id": "c6a4d566460a06504abf7e2c54be4f2ea36e01fb",
"index": 7735,
"step-1": "<mask token>\n\n\nclass VGGNet(object):\n\n def __init__(self, checkpoint_name='VGGNet'):\n self.config = {'image_shape': [256, 256, 3], 'input_shape': [224, \n 224, 3], 'output_shape': [17], 'batch_size': 60, 'trn_steps': \n 680, 'trn_nb_epochs': 200, 'trn_transform': True,\n 'trn_imgs_csv': 'data/train_v2.csv', 'trn_imgs_dir':\n 'data/train-jpg', 'tst_imgs_csv':\n 'data/sample_submission_v2.csv', 'tst_imgs_dir': 'data/test-jpg'}\n self.checkpoint_name = checkpoint_name\n self.imgs = []\n self.lbls = []\n self.net = None\n self.rng = np.random\n\n @property\n def cpdir(self):\n cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str\n (x) for x in self.config['input_shape']]))\n if not path.exists(cpdir):\n mkdir(cpdir)\n return cpdir\n\n def create_net(self):\n x = inputs = Input(shape=self.config['input_shape'])\n vgg = VGG19(include_top=False, input_tensor=x)\n outputs = Flatten()(vgg.output)\n outputs = Dropout(0.1)(outputs)\n outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(\n outputs)\n\n def true_pos(yt, yp):\n return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))\n\n def pred_pos(yt, yp):\n return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))\n\n def F2(yt, yp):\n yt, yp = K.round(yt), K.round(yp)\n tp = K.sum(yt * yp)\n fp = K.sum(K.clip(yp - yt, 0, 1))\n fn = K.sum(K.clip(yt - yp, 0, 1))\n p = tp / (tp + fp)\n r = tp / (tp + fn)\n b = 2.0\n return (1 + b ** 2) * (p * r / (b ** 2 * p + r + K.epsilon()))\n self.net = Model(inputs, outputs)\n self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',\n metrics=['binary_accuracy', F2, true_pos, pred_pos])\n self.net.summary()\n plot_model(self.net, to_file='%s/net.png' % self.cpdir)\n return\n <mask token>\n\n def get_mean_img(self, imgs_paths, mean_img_path):\n \"\"\"Compute the mean image from the given paths and save it to the given path.\"\"\"\n logger = logging.getLogger(funcname())\n if not path.exists(mean_img_path):\n mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)\n for idx, img_path in enumerate(imgs_paths):\n mean_img += imread(img_path, mode='RGB').astype(np.float32\n ) / len(imgs_paths)\n if idx % 1000 == 0:\n logger.info('%d/%d' % (idx, len(imgs_paths)))\n imsave(mean_img_path, mean_img)\n return imread(mean_img_path)\n\n def train_batch_gen(self, imgs_csv, imgs_dir, transform):\n logger = logging.getLogger(funcname())\n df = pd.read_csv(imgs_csv)\n imgs_paths = [('%s/%s.jpg' % (imgs_dir, n)) for n in df[\n 'image_name'].values]\n tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]\n mean_img = self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' %\n self.cpdir)\n mean_img = mean_img.astype(np.float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n while True:\n imgs_batch = np.zeros([self.config['batch_size']] + self.config\n ['input_shape'])\n tags_batch = np.zeros([self.config['batch_size']] + self.config\n ['output_shape'])\n random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)),\n len(imgs_paths)))\n for batch_idx in range(self.config['batch_size']):\n data_idx = next(random_idxs)\n img = imread(imgs_paths[data_idx], mode='RGB')\n img = img_preprocess(img)\n img = resize(img, self.config['input_shape'],\n preserve_range=True, mode='constant')\n if transform:\n img = random_transforms(img, nb_min=0, nb_max=6)\n imgs_batch[batch_idx] = img\n tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])\n yield imgs_batch, tags_batch\n\n def 
predict(self, img_batch):\n imgs_paths = listdir(self.config['trn_imgs_dir'])\n mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir\n mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.\n float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n for idx in range(len(img_batch)):\n img_batch[idx] = img_preprocess(img_batch[idx])\n tags_pred = self.net.predict(img_batch)\n tags_pred = tags_pred.round().astype(np.uint8)\n return tags_pred\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass VGGNet(object):\n\n def __init__(self, checkpoint_name='VGGNet'):\n self.config = {'image_shape': [256, 256, 3], 'input_shape': [224, \n 224, 3], 'output_shape': [17], 'batch_size': 60, 'trn_steps': \n 680, 'trn_nb_epochs': 200, 'trn_transform': True,\n 'trn_imgs_csv': 'data/train_v2.csv', 'trn_imgs_dir':\n 'data/train-jpg', 'tst_imgs_csv':\n 'data/sample_submission_v2.csv', 'tst_imgs_dir': 'data/test-jpg'}\n self.checkpoint_name = checkpoint_name\n self.imgs = []\n self.lbls = []\n self.net = None\n self.rng = np.random\n\n @property\n def cpdir(self):\n cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str\n (x) for x in self.config['input_shape']]))\n if not path.exists(cpdir):\n mkdir(cpdir)\n return cpdir\n\n def create_net(self):\n x = inputs = Input(shape=self.config['input_shape'])\n vgg = VGG19(include_top=False, input_tensor=x)\n outputs = Flatten()(vgg.output)\n outputs = Dropout(0.1)(outputs)\n outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(\n outputs)\n\n def true_pos(yt, yp):\n return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))\n\n def pred_pos(yt, yp):\n return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))\n\n def F2(yt, yp):\n yt, yp = K.round(yt), K.round(yp)\n tp = K.sum(yt * yp)\n fp = K.sum(K.clip(yp - yt, 0, 1))\n fn = K.sum(K.clip(yt - yp, 0, 1))\n p = tp / (tp + fp)\n r = tp / (tp + fn)\n b = 2.0\n return (1 + b ** 2) * (p * r / (b ** 2 * p + r + K.epsilon()))\n self.net = Model(inputs, outputs)\n self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',\n metrics=['binary_accuracy', F2, true_pos, pred_pos])\n self.net.summary()\n plot_model(self.net, to_file='%s/net.png' % self.cpdir)\n return\n\n def train(self):\n batch_gen = self.train_batch_gen(self.config['trn_imgs_csv'], self.\n config['trn_imgs_dir'], self.config['trn_transform'])\n cb = [HistoryPlot('%s/history.png' % self.cpdir), CSVLogger(\n '%s/history.csv' % self.cpdir), ModelCheckpoint(\n '%s/loss.weights' % self.cpdir, monitor='loss', verbose=1,\n save_best_only=True, mode='min', save_weights_only=True),\n ModelCheckpoint('%s/F2.weights' % self.cpdir, monitor='F2',\n verbose=1, save_best_only=True, mode='max', save_weights_only=\n True), ReduceLROnPlateau(monitor='F2', factor=0.8, patience=2,\n epsilon=0.005, verbose=1, mode='min'), EarlyStopping(monitor=\n 'F2', min_delta=0.01, patience=10, verbose=1, mode='max')]\n self.net.fit_generator(batch_gen, steps_per_epoch=self.config[\n 'trn_steps'], verbose=1, callbacks=cb, epochs=self.config[\n 'trn_nb_epochs'], workers=2, pickle_safe=True)\n return\n\n def get_mean_img(self, imgs_paths, mean_img_path):\n \"\"\"Compute the mean image from the given paths and save it to the given path.\"\"\"\n logger = logging.getLogger(funcname())\n if not path.exists(mean_img_path):\n mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)\n for idx, img_path in enumerate(imgs_paths):\n mean_img += imread(img_path, mode='RGB').astype(np.float32\n ) / len(imgs_paths)\n if idx % 1000 == 0:\n logger.info('%d/%d' % (idx, len(imgs_paths)))\n imsave(mean_img_path, mean_img)\n return imread(mean_img_path)\n\n def train_batch_gen(self, imgs_csv, imgs_dir, transform):\n logger = logging.getLogger(funcname())\n df = pd.read_csv(imgs_csv)\n imgs_paths = [('%s/%s.jpg' % (imgs_dir, n)) for n in df[\n 'image_name'].values]\n tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]\n mean_img = self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' %\n self.cpdir)\n mean_img = 
mean_img.astype(np.float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n while True:\n imgs_batch = np.zeros([self.config['batch_size']] + self.config\n ['input_shape'])\n tags_batch = np.zeros([self.config['batch_size']] + self.config\n ['output_shape'])\n random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)),\n len(imgs_paths)))\n for batch_idx in range(self.config['batch_size']):\n data_idx = next(random_idxs)\n img = imread(imgs_paths[data_idx], mode='RGB')\n img = img_preprocess(img)\n img = resize(img, self.config['input_shape'],\n preserve_range=True, mode='constant')\n if transform:\n img = random_transforms(img, nb_min=0, nb_max=6)\n imgs_batch[batch_idx] = img\n tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])\n yield imgs_batch, tags_batch\n\n def predict(self, img_batch):\n imgs_paths = listdir(self.config['trn_imgs_dir'])\n mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir\n mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.\n float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n for idx in range(len(img_batch)):\n img_batch[idx] = img_preprocess(img_batch[idx])\n tags_pred = self.net.predict(img_batch)\n tags_pred = tags_pred.round().astype(np.uint8)\n return tags_pred\n\n\n<mask token>\n",
"step-3": "<mask token>\nnp.random.seed(317)\n<mask token>\nsys.path.append('.')\n<mask token>\n\n\nclass VGGNet(object):\n\n def __init__(self, checkpoint_name='VGGNet'):\n self.config = {'image_shape': [256, 256, 3], 'input_shape': [224, \n 224, 3], 'output_shape': [17], 'batch_size': 60, 'trn_steps': \n 680, 'trn_nb_epochs': 200, 'trn_transform': True,\n 'trn_imgs_csv': 'data/train_v2.csv', 'trn_imgs_dir':\n 'data/train-jpg', 'tst_imgs_csv':\n 'data/sample_submission_v2.csv', 'tst_imgs_dir': 'data/test-jpg'}\n self.checkpoint_name = checkpoint_name\n self.imgs = []\n self.lbls = []\n self.net = None\n self.rng = np.random\n\n @property\n def cpdir(self):\n cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str\n (x) for x in self.config['input_shape']]))\n if not path.exists(cpdir):\n mkdir(cpdir)\n return cpdir\n\n def create_net(self):\n x = inputs = Input(shape=self.config['input_shape'])\n vgg = VGG19(include_top=False, input_tensor=x)\n outputs = Flatten()(vgg.output)\n outputs = Dropout(0.1)(outputs)\n outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(\n outputs)\n\n def true_pos(yt, yp):\n return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))\n\n def pred_pos(yt, yp):\n return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))\n\n def F2(yt, yp):\n yt, yp = K.round(yt), K.round(yp)\n tp = K.sum(yt * yp)\n fp = K.sum(K.clip(yp - yt, 0, 1))\n fn = K.sum(K.clip(yt - yp, 0, 1))\n p = tp / (tp + fp)\n r = tp / (tp + fn)\n b = 2.0\n return (1 + b ** 2) * (p * r / (b ** 2 * p + r + K.epsilon()))\n self.net = Model(inputs, outputs)\n self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',\n metrics=['binary_accuracy', F2, true_pos, pred_pos])\n self.net.summary()\n plot_model(self.net, to_file='%s/net.png' % self.cpdir)\n return\n\n def train(self):\n batch_gen = self.train_batch_gen(self.config['trn_imgs_csv'], self.\n config['trn_imgs_dir'], self.config['trn_transform'])\n cb = [HistoryPlot('%s/history.png' % self.cpdir), CSVLogger(\n '%s/history.csv' % self.cpdir), ModelCheckpoint(\n '%s/loss.weights' % self.cpdir, monitor='loss', verbose=1,\n save_best_only=True, mode='min', save_weights_only=True),\n ModelCheckpoint('%s/F2.weights' % self.cpdir, monitor='F2',\n verbose=1, save_best_only=True, mode='max', save_weights_only=\n True), ReduceLROnPlateau(monitor='F2', factor=0.8, patience=2,\n epsilon=0.005, verbose=1, mode='min'), EarlyStopping(monitor=\n 'F2', min_delta=0.01, patience=10, verbose=1, mode='max')]\n self.net.fit_generator(batch_gen, steps_per_epoch=self.config[\n 'trn_steps'], verbose=1, callbacks=cb, epochs=self.config[\n 'trn_nb_epochs'], workers=2, pickle_safe=True)\n return\n\n def get_mean_img(self, imgs_paths, mean_img_path):\n \"\"\"Compute the mean image from the given paths and save it to the given path.\"\"\"\n logger = logging.getLogger(funcname())\n if not path.exists(mean_img_path):\n mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)\n for idx, img_path in enumerate(imgs_paths):\n mean_img += imread(img_path, mode='RGB').astype(np.float32\n ) / len(imgs_paths)\n if idx % 1000 == 0:\n logger.info('%d/%d' % (idx, len(imgs_paths)))\n imsave(mean_img_path, mean_img)\n return imread(mean_img_path)\n\n def train_batch_gen(self, imgs_csv, imgs_dir, transform):\n logger = logging.getLogger(funcname())\n df = pd.read_csv(imgs_csv)\n imgs_paths = [('%s/%s.jpg' % (imgs_dir, n)) for n in df[\n 'image_name'].values]\n tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]\n mean_img = 
self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' %\n self.cpdir)\n mean_img = mean_img.astype(np.float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n while True:\n imgs_batch = np.zeros([self.config['batch_size']] + self.config\n ['input_shape'])\n tags_batch = np.zeros([self.config['batch_size']] + self.config\n ['output_shape'])\n random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)),\n len(imgs_paths)))\n for batch_idx in range(self.config['batch_size']):\n data_idx = next(random_idxs)\n img = imread(imgs_paths[data_idx], mode='RGB')\n img = img_preprocess(img)\n img = resize(img, self.config['input_shape'],\n preserve_range=True, mode='constant')\n if transform:\n img = random_transforms(img, nb_min=0, nb_max=6)\n imgs_batch[batch_idx] = img\n tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])\n yield imgs_batch, tags_batch\n\n def predict(self, img_batch):\n imgs_paths = listdir(self.config['trn_imgs_dir'])\n mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir\n mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.\n float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n for idx in range(len(img_batch)):\n img_batch[idx] = img_preprocess(img_batch[idx])\n tags_pred = self.net.predict(img_batch)\n tags_pred = tags_pred.round().astype(np.uint8)\n return tags_pred\n\n\nif __name__ == '__main__':\n from planet.model_runner import model_runner\n model = VGGNet()\n model_runner(model)\n",
"step-4": "import numpy as np\nnp.random.seed(317)\nfrom glob import glob\nfrom itertools import cycle\nfrom keras.applications.vgg19 import VGG19\nfrom keras.optimizers import Adam\nfrom keras.models import Model\nfrom keras.layers import Input, BatchNormalization, Flatten, Dropout, Dense\nfrom keras.utils import plot_model\nfrom keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, CSVLogger, EarlyStopping, Callback\nfrom keras.losses import kullback_leibler_divergence\nfrom math import ceil\nfrom os import path, mkdir, listdir\nfrom skimage.transform import resize\nfrom scipy.misc import imread, imsave\nfrom time import time\nimport argparse\nimport logging\nimport keras.backend as K\nimport pandas as pd\nimport tifffile as tif\nimport sys\nsys.path.append('.')\nfrom planet.utils.data_utils import tagset_to_ints, random_transforms\nfrom planet.utils.keras_utils import HistoryPlot\nfrom planet.utils.runtime import funcname\n\n\nclass VGGNet(object):\n\n def __init__(self, checkpoint_name='VGGNet'):\n self.config = {'image_shape': [256, 256, 3], 'input_shape': [224, \n 224, 3], 'output_shape': [17], 'batch_size': 60, 'trn_steps': \n 680, 'trn_nb_epochs': 200, 'trn_transform': True,\n 'trn_imgs_csv': 'data/train_v2.csv', 'trn_imgs_dir':\n 'data/train-jpg', 'tst_imgs_csv':\n 'data/sample_submission_v2.csv', 'tst_imgs_dir': 'data/test-jpg'}\n self.checkpoint_name = checkpoint_name\n self.imgs = []\n self.lbls = []\n self.net = None\n self.rng = np.random\n\n @property\n def cpdir(self):\n cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str\n (x) for x in self.config['input_shape']]))\n if not path.exists(cpdir):\n mkdir(cpdir)\n return cpdir\n\n def create_net(self):\n x = inputs = Input(shape=self.config['input_shape'])\n vgg = VGG19(include_top=False, input_tensor=x)\n outputs = Flatten()(vgg.output)\n outputs = Dropout(0.1)(outputs)\n outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(\n outputs)\n\n def true_pos(yt, yp):\n return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))\n\n def pred_pos(yt, yp):\n return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))\n\n def F2(yt, yp):\n yt, yp = K.round(yt), K.round(yp)\n tp = K.sum(yt * yp)\n fp = K.sum(K.clip(yp - yt, 0, 1))\n fn = K.sum(K.clip(yt - yp, 0, 1))\n p = tp / (tp + fp)\n r = tp / (tp + fn)\n b = 2.0\n return (1 + b ** 2) * (p * r / (b ** 2 * p + r + K.epsilon()))\n self.net = Model(inputs, outputs)\n self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',\n metrics=['binary_accuracy', F2, true_pos, pred_pos])\n self.net.summary()\n plot_model(self.net, to_file='%s/net.png' % self.cpdir)\n return\n\n def train(self):\n batch_gen = self.train_batch_gen(self.config['trn_imgs_csv'], self.\n config['trn_imgs_dir'], self.config['trn_transform'])\n cb = [HistoryPlot('%s/history.png' % self.cpdir), CSVLogger(\n '%s/history.csv' % self.cpdir), ModelCheckpoint(\n '%s/loss.weights' % self.cpdir, monitor='loss', verbose=1,\n save_best_only=True, mode='min', save_weights_only=True),\n ModelCheckpoint('%s/F2.weights' % self.cpdir, monitor='F2',\n verbose=1, save_best_only=True, mode='max', save_weights_only=\n True), ReduceLROnPlateau(monitor='F2', factor=0.8, patience=2,\n epsilon=0.005, verbose=1, mode='min'), EarlyStopping(monitor=\n 'F2', min_delta=0.01, patience=10, verbose=1, mode='max')]\n self.net.fit_generator(batch_gen, steps_per_epoch=self.config[\n 'trn_steps'], verbose=1, callbacks=cb, epochs=self.config[\n 'trn_nb_epochs'], workers=2, pickle_safe=True)\n return\n\n def 
get_mean_img(self, imgs_paths, mean_img_path):\n \"\"\"Compute the mean image from the given paths and save it to the given path.\"\"\"\n logger = logging.getLogger(funcname())\n if not path.exists(mean_img_path):\n mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)\n for idx, img_path in enumerate(imgs_paths):\n mean_img += imread(img_path, mode='RGB').astype(np.float32\n ) / len(imgs_paths)\n if idx % 1000 == 0:\n logger.info('%d/%d' % (idx, len(imgs_paths)))\n imsave(mean_img_path, mean_img)\n return imread(mean_img_path)\n\n def train_batch_gen(self, imgs_csv, imgs_dir, transform):\n logger = logging.getLogger(funcname())\n df = pd.read_csv(imgs_csv)\n imgs_paths = [('%s/%s.jpg' % (imgs_dir, n)) for n in df[\n 'image_name'].values]\n tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]\n mean_img = self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' %\n self.cpdir)\n mean_img = mean_img.astype(np.float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n while True:\n imgs_batch = np.zeros([self.config['batch_size']] + self.config\n ['input_shape'])\n tags_batch = np.zeros([self.config['batch_size']] + self.config\n ['output_shape'])\n random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)),\n len(imgs_paths)))\n for batch_idx in range(self.config['batch_size']):\n data_idx = next(random_idxs)\n img = imread(imgs_paths[data_idx], mode='RGB')\n img = img_preprocess(img)\n img = resize(img, self.config['input_shape'],\n preserve_range=True, mode='constant')\n if transform:\n img = random_transforms(img, nb_min=0, nb_max=6)\n imgs_batch[batch_idx] = img\n tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])\n yield imgs_batch, tags_batch\n\n def predict(self, img_batch):\n imgs_paths = listdir(self.config['trn_imgs_dir'])\n mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir\n mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.\n float32) / 255.0\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32\n ) / 255.0 - mean_img_mean\n for idx in range(len(img_batch)):\n img_batch[idx] = img_preprocess(img_batch[idx])\n tags_pred = self.net.predict(img_batch)\n tags_pred = tags_pred.round().astype(np.uint8)\n return tags_pred\n\n\nif __name__ == '__main__':\n from planet.model_runner import model_runner\n model = VGGNet()\n model_runner(model)\n",
"step-5": "# VGGNet\nimport numpy as np\nnp.random.seed(317)\n\nfrom glob import glob\nfrom itertools import cycle\nfrom keras.applications.vgg19 import VGG19\nfrom keras.optimizers import Adam\nfrom keras.models import Model\nfrom keras.layers import Input, BatchNormalization, Flatten, Dropout, Dense\nfrom keras.utils import plot_model\nfrom keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, CSVLogger, EarlyStopping, Callback\nfrom keras.losses import kullback_leibler_divergence\nfrom math import ceil\nfrom os import path, mkdir, listdir\nfrom skimage.transform import resize\nfrom scipy.misc import imread, imsave\nfrom time import time\nimport argparse\nimport logging\nimport keras.backend as K\nimport pandas as pd\nimport tifffile as tif\n\nimport sys\nsys.path.append('.')\nfrom planet.utils.data_utils import tagset_to_ints, random_transforms\nfrom planet.utils.keras_utils import HistoryPlot\nfrom planet.utils.runtime import funcname\n\n\nclass VGGNet(object):\n\n def __init__(self, checkpoint_name='VGGNet'):\n\n self.config = {\n 'image_shape': [256, 256, 3],\n 'input_shape': [224, 224, 3],\n 'output_shape': [17, ],\n 'batch_size': 60,\n 'trn_steps': 680,\n 'trn_nb_epochs': 200,\n 'trn_transform': True,\n 'trn_imgs_csv': 'data/train_v2.csv',\n 'trn_imgs_dir': 'data/train-jpg',\n 'tst_imgs_csv': 'data/sample_submission_v2.csv',\n 'tst_imgs_dir': 'data/test-jpg'\n }\n\n self.checkpoint_name = checkpoint_name\n self.imgs = []\n self.lbls = []\n self.net = None\n self.rng = np.random\n\n @property\n def cpdir(self):\n cpdir = 'checkpoints/%s_%s/' % (self.checkpoint_name, '_'.join([str(x) for x in self.config['input_shape']]))\n if not path.exists(cpdir):\n mkdir(cpdir)\n return cpdir\n\n def create_net(self):\n\n x = inputs = Input(shape=self.config['input_shape'])\n vgg = VGG19(include_top=False, input_tensor=x)\n\n outputs = Flatten()(vgg.output)\n outputs = Dropout(0.1)(outputs)\n outputs = Dense(self.config['output_shape'][0], activation='sigmoid')(outputs)\n\n def true_pos(yt, yp):\n return K.sum(K.round(yt)) / K.sum(K.clip(yt, 1, 1))\n\n def pred_pos(yt, yp):\n return K.sum(K.round(yp)) / K.sum(K.clip(yt, 1, 1))\n\n def F2(yt, yp):\n yt, yp = K.round(yt), K.round(yp)\n tp = K.sum(yt * yp)\n fp = K.sum(K.clip(yp - yt, 0, 1))\n fn = K.sum(K.clip(yt - yp, 0, 1))\n p = tp / (tp + fp)\n r = tp / (tp + fn)\n b = 2.0\n return (1 + b**2) * ((p * r) / (b**2 * p + r + K.epsilon()))\n\n self.net = Model(inputs, outputs)\n self.net.compile(optimizer=Adam(0.001), loss='binary_crossentropy',\n metrics=['binary_accuracy', F2, true_pos, pred_pos])\n self.net.summary()\n plot_model(self.net, to_file='%s/net.png' % self.cpdir)\n return\n\n def train(self):\n\n batch_gen = self.train_batch_gen(self.config['trn_imgs_csv'], self.config[\n 'trn_imgs_dir'], self.config['trn_transform'])\n\n cb = [\n HistoryPlot('%s/history.png' % self.cpdir),\n CSVLogger('%s/history.csv' % self.cpdir),\n ModelCheckpoint('%s/loss.weights' % self.cpdir, monitor='loss', verbose=1,\n save_best_only=True, mode='min', save_weights_only=True),\n ModelCheckpoint('%s/F2.weights' % self.cpdir, monitor='F2',\n verbose=1, save_best_only=True, mode='max', save_weights_only=True),\n ReduceLROnPlateau(monitor='F2', factor=0.8, patience=2, epsilon=0.005, verbose=1, mode='min'),\n EarlyStopping(monitor='F2', min_delta=0.01, patience=10, verbose=1, mode='max')\n ]\n\n self.net.fit_generator(batch_gen, steps_per_epoch=self.config['trn_steps'], verbose=1, callbacks=cb,\n epochs=self.config['trn_nb_epochs'], workers=2, 
pickle_safe=True)\n\n return\n\n def get_mean_img(self, imgs_paths, mean_img_path):\n '''Compute the mean image from the given paths and save it to the given path.'''\n logger = logging.getLogger(funcname())\n if not path.exists(mean_img_path):\n mean_img = np.zeros(self.config['image_shape'], dtype=np.float32)\n for idx, img_path in enumerate(imgs_paths):\n mean_img += imread(img_path, mode='RGB').astype(np.float32) / len(imgs_paths)\n if idx % 1000 == 0:\n logger.info('%d/%d' % (idx, len(imgs_paths)))\n imsave(mean_img_path, mean_img)\n return imread(mean_img_path)\n\n def train_batch_gen(self, imgs_csv, imgs_dir, transform):\n\n logger = logging.getLogger(funcname())\n\n # Read the CSV and extract image names and tags.\n df = pd.read_csv(imgs_csv)\n imgs_paths = ['%s/%s.jpg' % (imgs_dir, n) for n in df['image_name'].values]\n tag_sets = [set(t.strip().split(' ')) for t in df['tags'].values]\n\n # Compute the mean image for pre-processing.\n mean_img = self.get_mean_img(imgs_paths, '%s/mean_img_trn.jpg' % self.cpdir)\n mean_img = mean_img.astype(np.float32) / 255.\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32) / 255. - mean_img_mean\n\n while True:\n\n imgs_batch = np.zeros([self.config['batch_size'], ] + self.config['input_shape'])\n tags_batch = np.zeros([self.config['batch_size'], ] + self.config['output_shape'])\n random_idxs = cycle(np.random.choice(np.arange(len(imgs_paths)), len(imgs_paths)))\n\n for batch_idx in range(self.config['batch_size']):\n data_idx = next(random_idxs)\n img = imread(imgs_paths[data_idx], mode='RGB')\n img = img_preprocess(img)\n img = resize(img, self.config['input_shape'], preserve_range=True, mode='constant')\n if transform:\n img = random_transforms(img, nb_min=0, nb_max=6)\n imgs_batch[batch_idx] = img\n tags_batch[batch_idx] = tagset_to_ints(tag_sets[data_idx])\n\n yield imgs_batch, tags_batch\n\n def predict(self, img_batch):\n\n # Get the mean image\n imgs_paths = listdir(self.config['trn_imgs_dir'])\n mean_img_path = '%s/mean_img_trn.jpg' % self.cpdir\n mean_img = self.get_mean_img(imgs_paths, mean_img_path).astype(np.float32) / 255.\n mean_img_mean = np.mean(mean_img)\n img_preprocess = lambda img: img.astype(np.float32) / 255. - mean_img_mean\n\n for idx in range(len(img_batch)):\n img_batch[idx] = img_preprocess(img_batch[idx])\n\n tags_pred = self.net.predict(img_batch)\n tags_pred = tags_pred.round().astype(np.uint8)\n return tags_pred\n\nif __name__ == \"__main__\":\n from planet.model_runner import model_runner\n model = VGGNet()\n model_runner(model)\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
import numpy as n, pylab as p
from scipy import stats as st
a=st.norm(0,1)
b=st.norm(0.1,1)
domain=n.linspace(-4,4,10000)
avals=a.cdf(domain)
bvals=b.cdf(domain)
diffN=n.abs(avals-bvals).max()
a=st.norm(0,1)
b=st.norm(0,1.2)
domain=n.linspace(-4,4,10000)
avals=a.cdf(domain)
bvals=b.cdf(domain)
diffN2=n.abs(avals-bvals).max()
a=st.uniform(0,1)
b=st.uniform(0.05,1.0)
domain=n.linspace(0,1.05,10000)
avals=a.cdf(domain)
bvals=b.cdf(domain)
diffU=n.abs(avals-bvals).max()
a=st.uniform(0,1)
b=st.uniform(-0.05,1.05)
domain=n.linspace(0,1.05,10000)
avals=a.cdf(domain)
bvals=b.cdf(domain)
diffU2=n.abs(avals-bvals).max()
#a=st.weibull(1.5)
#b=st.weibull(1.7)
#domain=n.linspace(0,1.05,10000)
#avals=a.cdf(domain)
#bvals=b.cdf(domain)
#diffW=n.abs(avals-bvals).max()
#a=st.power(1.5)
#b=st.power(1.7)
#domain=n.linspace(0,1.05,10000)
#avals=a.cdf(domain)
#bvals=b.cdf(domain)
#diffP=n.abs(avals-bvals).max()
#x = n.arange(1,100.)/50.
x=n.linspace(0,20,100000)
step=x[1]-x[0]
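# Weibull probability density with scale nn and shape a, evaluated on the grid x.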
def weib(x,nn,a):
return (a / nn) * (x / nn)**(a - 1) * n.exp(-(x / nn)**a)
#count, bins, ignored = p.hist(n.random.weibull(5.,1000))
#x = n.arange(1,100.)/50.
#scale = count.max()/weib(x, 1., 5.).max()
W=weib(x, 1., 1.5)
W_=W/(W*step).sum()
W__=n.cumsum(W_)
W2=weib(x, 1., 1.7)
W2_=W2/(W2*step).sum()
W2__=n.cumsum(W2_)
diffW=n.abs(W_-W2_).max()
#p.plot(x, W_)
#p.plot(x, W2_)
##p.plot(x, weib(x, 1., 5.)*scale)
#p.show()
a=st.powerlaw(1.5)
b=st.powerlaw(1.7)
domain=n.linspace(0,5.05,10000)
avals=a.cdf(domain)
bvals=b.cdf(domain)
diffP=n.abs(avals-bvals).max()
print("distancias de KS para os modelos matematicos:", diffN,diffN2,diffU,diffU2,diffW,diffP)
# distancias de KS para os modelos matematicos:
# 0.0398776116762 0.0439947104098 0.0952338090952 0.047619047619 0.128565475845 0.0460149130584
# X = (-n.ln(U))^{1/a}
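# KS-style distance between Weibull shapes 1.5 and 1.7: maximum absolute gap between
# the cumulative sums of the densities normalised to integrate to one over the grid.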
lb,rb,NE,shape1,shape2=0,10,10000,1.5,1.7
x=n.linspace(lb,rb,NE)
step=x[1]-x[0]
W=weib(x, 1., shape1)
W_=W/((W*step).sum())
W__=n.cumsum(W_)
W2=weib(x, 1., shape2)
W2_=W2/((W2*step).sum())
W2__=n.cumsum(W2_)
diffW=n.abs(W__-W2__).max()
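# Repeat with sum-to-one (probability mass) normalisation instead of the integral
# normalisation above, so the cumulative sums end at 1 like proper CDFs.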
lb,rb,NE,shape1,shape2=0,10,10000,1.5,1.7
x=n.linspace(lb,rb,NE)
step=x[1]-x[0]
W=weib(x, 1., shape1)
W_=W/((W).sum())
W__=n.cumsum(W_)
W2=weib(x, 1., shape2)
W2_=W2/((W2).sum())
W2__=n.cumsum(W2_)
diffW=n.abs(W__-W2__).max()
|
normal
|
{
"blob_id": "647258ee5f2f6f1cb8118bcf146b8959c65b70cd",
"index": 8045,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef weib(x, nn, a):\n return a / nn * (x / nn) ** (a - 1) * n.exp(-(x / nn) ** a)\n\n\n<mask token>\nprint('distancias de KS para os modelos matematicos:', diffN, diffN2, diffU,\n diffU2, diffW, diffP)\n<mask token>\n",
"step-3": "<mask token>\na = st.norm(0, 1)\nb = st.norm(0.1, 1)\ndomain = n.linspace(-4, 4, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffN = n.abs(avals - bvals).max()\na = st.norm(0, 1)\nb = st.norm(0, 1.2)\ndomain = n.linspace(-4, 4, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffN2 = n.abs(avals - bvals).max()\na = st.uniform(0, 1)\nb = st.uniform(0.05, 1.0)\ndomain = n.linspace(0, 1.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffU = n.abs(avals - bvals).max()\na = st.uniform(0, 1)\nb = st.uniform(-0.05, 1.05)\ndomain = n.linspace(0, 1.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffU2 = n.abs(avals - bvals).max()\nx = n.linspace(0, 20, 100000)\nstep = x[1] - x[0]\n\n\ndef weib(x, nn, a):\n return a / nn * (x / nn) ** (a - 1) * n.exp(-(x / nn) ** a)\n\n\nW = weib(x, 1.0, 1.5)\nW_ = W / (W * step).sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, 1.7)\nW2_ = W2 / (W2 * step).sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W_ - W2_).max()\na = st.powerlaw(1.5)\nb = st.powerlaw(1.7)\ndomain = n.linspace(0, 5.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffP = n.abs(avals - bvals).max()\nprint('distancias de KS para os modelos matematicos:', diffN, diffN2, diffU,\n diffU2, diffW, diffP)\nlb, rb, NE, shape1, shape2 = 0, 10, 10000, 1.5, 1.7\nx = n.linspace(lb, rb, NE)\nstep = x[1] - x[0]\nW = weib(x, 1.0, shape1)\nW_ = W / (W * step).sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, shape2)\nW2_ = W2 / (W2 * step).sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W__ - W2__).max()\nlb, rb, NE, shape1, shape2 = 0, 10, 10000, 1.5, 1.7\nx = n.linspace(lb, rb, NE)\nstep = x[1] - x[0]\nW = weib(x, 1.0, shape1)\nW_ = W / W.sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, shape2)\nW2_ = W2 / W2.sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W__ - W2__).max()\n",
"step-4": "import numpy as n, pylab as p\nfrom scipy import stats as st\na = st.norm(0, 1)\nb = st.norm(0.1, 1)\ndomain = n.linspace(-4, 4, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffN = n.abs(avals - bvals).max()\na = st.norm(0, 1)\nb = st.norm(0, 1.2)\ndomain = n.linspace(-4, 4, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffN2 = n.abs(avals - bvals).max()\na = st.uniform(0, 1)\nb = st.uniform(0.05, 1.0)\ndomain = n.linspace(0, 1.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffU = n.abs(avals - bvals).max()\na = st.uniform(0, 1)\nb = st.uniform(-0.05, 1.05)\ndomain = n.linspace(0, 1.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffU2 = n.abs(avals - bvals).max()\nx = n.linspace(0, 20, 100000)\nstep = x[1] - x[0]\n\n\ndef weib(x, nn, a):\n return a / nn * (x / nn) ** (a - 1) * n.exp(-(x / nn) ** a)\n\n\nW = weib(x, 1.0, 1.5)\nW_ = W / (W * step).sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, 1.7)\nW2_ = W2 / (W2 * step).sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W_ - W2_).max()\na = st.powerlaw(1.5)\nb = st.powerlaw(1.7)\ndomain = n.linspace(0, 5.05, 10000)\navals = a.cdf(domain)\nbvals = b.cdf(domain)\ndiffP = n.abs(avals - bvals).max()\nprint('distancias de KS para os modelos matematicos:', diffN, diffN2, diffU,\n diffU2, diffW, diffP)\nlb, rb, NE, shape1, shape2 = 0, 10, 10000, 1.5, 1.7\nx = n.linspace(lb, rb, NE)\nstep = x[1] - x[0]\nW = weib(x, 1.0, shape1)\nW_ = W / (W * step).sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, shape2)\nW2_ = W2 / (W2 * step).sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W__ - W2__).max()\nlb, rb, NE, shape1, shape2 = 0, 10, 10000, 1.5, 1.7\nx = n.linspace(lb, rb, NE)\nstep = x[1] - x[0]\nW = weib(x, 1.0, shape1)\nW_ = W / W.sum()\nW__ = n.cumsum(W_)\nW2 = weib(x, 1.0, shape2)\nW2_ = W2 / W2.sum()\nW2__ = n.cumsum(W2_)\ndiffW = n.abs(W__ - W2__).max()\n",
"step-5": "import numpy as n, pylab as p\nfrom scipy import stats as st\na=st.norm(0,1)\nb=st.norm(0.1,1)\ndomain=n.linspace(-4,4,10000)\navals=a.cdf(domain)\nbvals=b.cdf(domain)\ndiffN=n.abs(avals-bvals).max()\n\na=st.norm(0,1)\nb=st.norm(0,1.2)\ndomain=n.linspace(-4,4,10000)\navals=a.cdf(domain)\nbvals=b.cdf(domain)\ndiffN2=n.abs(avals-bvals).max()\n\na=st.uniform(0,1)\nb=st.uniform(0.05,1.0)\ndomain=n.linspace(0,1.05,10000)\navals=a.cdf(domain)\nbvals=b.cdf(domain)\ndiffU=n.abs(avals-bvals).max()\n\na=st.uniform(0,1)\nb=st.uniform(-0.05,1.05)\ndomain=n.linspace(0,1.05,10000)\navals=a.cdf(domain)\nbvals=b.cdf(domain)\ndiffU2=n.abs(avals-bvals).max()\n\n#a=st.weibull(1.5)\n#b=st.weibull(1.7)\n#domain=n.linspace(0,1.05,10000)\n#avals=a.cdf(domain)\n#bvals=b.cdf(domain)\n#diffW=n.abs(avals-bvals).max()\n\n#a=st.power(1.5)\n#b=st.power(1.7)\n#domain=n.linspace(0,1.05,10000)\n#avals=a.cdf(domain)\n#bvals=b.cdf(domain)\n#diffP=n.abs(avals-bvals).max()\n\n#x = n.arange(1,100.)/50.\nx=n.linspace(0,20,100000)\nstep=x[1]-x[0]\ndef weib(x,nn,a):\n return (a / nn) * (x / nn)**(a - 1) * n.exp(-(x / nn)**a)\n\n#count, bins, ignored = p.hist(n.random.weibull(5.,1000))\n#x = n.arange(1,100.)/50.\n#scale = count.max()/weib(x, 1., 5.).max()\nW=weib(x, 1., 1.5)\nW_=W/(W*step).sum()\nW__=n.cumsum(W_)\nW2=weib(x, 1., 1.7)\nW2_=W2/(W2*step).sum()\nW2__=n.cumsum(W2_)\ndiffW=n.abs(W_-W2_).max()\n#p.plot(x, W_)\n#p.plot(x, W2_)\n##p.plot(x, weib(x, 1., 5.)*scale)\n#p.show()\n\na=st.powerlaw(1.5)\nb=st.powerlaw(1.7)\ndomain=n.linspace(0,5.05,10000)\navals=a.cdf(domain)\nbvals=b.cdf(domain)\ndiffP=n.abs(avals-bvals).max()\n\nprint(\"distancias de KS para os modelos matematicos:\", diffN,diffN2,diffU,diffU2,diffW,diffP)\n# distancias de KS para os modelos matematicos:\n# 0.0398776116762 0.0439947104098 0.0952338090952 0.047619047619 0.128565475845 0.0460149130584\n\n\n# X = (-n.ln(U))^{1/a}\nlb,rb,NE,shape1,shape2=0,10,10000,1.5,1.7\nx=n.linspace(lb,rb,NE)\nstep=x[1]-x[0]\nW=weib(x, 1., shape1)\nW_=W/((W*step).sum())\nW__=n.cumsum(W_)\nW2=weib(x, 1., shape2)\nW2_=W2/((W2*step).sum())\nW2__=n.cumsum(W2_)\ndiffW=n.abs(W__-W2__).max()\n\n\nlb,rb,NE,shape1,shape2=0,10,10000,1.5,1.7\nx=n.linspace(lb,rb,NE)\nstep=x[1]-x[0]\nW=weib(x, 1., shape1)\nW_=W/((W).sum())\nW__=n.cumsum(W_)\nW2=weib(x, 1., shape2)\nW2_=W2/((W2).sum())\nW2__=n.cumsum(W2_)\ndiffW=n.abs(W__-W2__).max()\n\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
_base_ = [
'../models/cascade_rcnn_r50_fpn.py',
#'coco_instance.py',
'../datasets/dataset.py',
'../runtime/valid_search_wandb_runtime.py',
'../schedules/schedule_1x.py'
]
pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa
model = dict(
type='CascadeRCNN',
backbone=dict(
_delete_=True,
type='SwinTransformer',
embed_dims=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
mlp_ratio=4,
qkv_bias=True,
qk_scale=None,
drop_rate=0.,
attn_drop_rate=0.,
drop_path_rate=0.2,
patch_norm=True,
out_indices=(0, 1, 2, 3),
with_cp=False,
convert_weights=True,
init_cfg=dict(type='Pretrained', checkpoint=pretrained)),
neck=dict(in_channels=[96, 192, 384, 768])
#[256, 512, 1024, 2048]
)
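# The FPN in_channels above follow the four Swin-T stage widths (96/192/384/768)
# rather than the ResNet-50 defaults kept in the commented-out list.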
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
# augmentation strategy originates from DETR / Sparse RCNN
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(
type='AutoAugment',
policies=[[
dict(
type='Resize',
img_scale=[(480, 1024), (512, 1024), (544, 1024), (576, 1024),
(608, 1024), (640, 1024), (672, 1024), (704, 1024),
(736, 1024), (768, 1024), (800, 1024)],
multiscale_mode='value',
keep_ratio=True)
],
[
dict(
type='Resize',
img_scale=[(400, 1024), (500, 1024), (600, 1024)],
multiscale_mode='value',
keep_ratio=True),
dict(
type='RandomCrop',
crop_type='absolute_range',
crop_size=(384, 600),
allow_negative_crop=True),
dict(
type='Resize',
img_scale=[(480, 1024), (512, 1024), (544, 1024),
(576, 1024), (608, 1024), (640, 1024),
(672, 1024), (704, 1024), (736, 1024),
(768, 1024), (800, 1024)],
multiscale_mode='value',
override=True,
keep_ratio=True)
]]),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
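# Note: val_pipeline below is identical to train_pipeline, so validation images also
# go through the random flip, multi-scale resize, and crop augmentation.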
val_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(
type='AutoAugment',
policies=[[
dict(
type='Resize',
img_scale=[(480, 1024), (512, 1024), (544, 1024), (576, 1024),
(608, 1024), (640, 1024), (672, 1024), (704, 1024),
(736, 1024), (768, 1024), (800, 1024)],
multiscale_mode='value',
keep_ratio=True)
],
[
dict(
type='Resize',
img_scale=[(400, 1024), (500, 1024), (600, 1024)],
multiscale_mode='value',
keep_ratio=True),
dict(
type='RandomCrop',
crop_type='absolute_range',
crop_size=(384, 600),
allow_negative_crop=True),
dict(
type='Resize',
img_scale=[(480, 1024), (512, 1024), (544, 1024),
(576, 1024), (608, 1024), (640, 1024),
(672, 1024), (704, 1024), (736, 1024),
(768, 1024), (800, 1024)],
multiscale_mode='value',
override=True,
keep_ratio=True)
]]),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
data = dict(train=dict(pipeline=train_pipeline),val=dict(pipeline=val_pipeline))
evaluation = dict(interval=1, metric='bbox', save_best='bbox_mAP_50')
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(
# type='WandbLoggerHook',
# init_kwargs=dict(
# project='valid_search',
# name='YOUR_EXP'
# ))
])
# yapf:enable
custom_hooks = [dict(type='NumClassCheckHook')]
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)]
optimizer = dict(
_delete_=True,
type='AdamW',
lr=0.0001,
betas=(0.9, 0.999),
weight_decay=0.05,
paramwise_cfg=dict(
custom_keys={
'absolute_pos_embed': dict(decay_mult=0.),
'relative_position_bias_table': dict(decay_mult=0.),
'norm': dict(decay_mult=0.)
}))
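# AdamW with weight decay disabled (decay_mult=0.) for absolute/relative position
# embeddings and norm layers, as is common when fine-tuning transformer backbones.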
lr_config = dict(warmup_iters=1000, step=[27, 33])
runner = dict(max_epochs=36)
|
normal
|
{
"blob_id": "2874e05d6d5e0f13924e5920db22ea3343707dfa",
"index": 3898,
"step-1": "<mask token>\n",
"step-2": "_base_ = ['../models/cascade_rcnn_r50_fpn.py', '../datasets/dataset.py',\n '../runtime/valid_search_wandb_runtime.py', '../schedules/schedule_1x.py']\npretrained = (\n 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth'\n )\nmodel = dict(type='CascadeRCNN', backbone=dict(_delete_=True, type=\n 'SwinTransformer', embed_dims=96, depths=[2, 2, 6, 2], num_heads=[3, 6,\n 12, 24], window_size=7, mlp_ratio=4, qkv_bias=True, qk_scale=None,\n drop_rate=0.0, attn_drop_rate=0.0, drop_path_rate=0.2, patch_norm=True,\n out_indices=(0, 1, 2, 3), with_cp=False, convert_weights=True, init_cfg\n =dict(type='Pretrained', checkpoint=pretrained)), neck=dict(in_channels\n =[96, 192, 384, 768]))\nimg_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, \n 57.375], to_rgb=True)\ntrain_pipeline = [dict(type='LoadImageFromFile'), dict(type=\n 'LoadAnnotations', with_bbox=True), dict(type='RandomFlip', flip_ratio=\n 0.5), dict(type='AutoAugment', policies=[[dict(type='Resize', img_scale\n =[(480, 1024), (512, 1024), (544, 1024), (576, 1024), (608, 1024), (640,\n 1024), (672, 1024), (704, 1024), (736, 1024), (768, 1024), (800, 1024)],\n multiscale_mode='value', keep_ratio=True)], [dict(type='Resize',\n img_scale=[(400, 1024), (500, 1024), (600, 1024)], multiscale_mode=\n 'value', keep_ratio=True), dict(type='RandomCrop', crop_type=\n 'absolute_range', crop_size=(384, 600), allow_negative_crop=True), dict\n (type='Resize', img_scale=[(480, 1024), (512, 1024), (544, 1024), (576,\n 1024), (608, 1024), (640, 1024), (672, 1024), (704, 1024), (736, 1024),\n (768, 1024), (800, 1024)], multiscale_mode='value', override=True,\n keep_ratio=True)]]), dict(type='Normalize', **img_norm_cfg), dict(type=\n 'Pad', size_divisor=32), dict(type='DefaultFormatBundle'), dict(type=\n 'Collect', keys=['img', 'gt_bboxes', 'gt_labels'])]\nval_pipeline = [dict(type='LoadImageFromFile'), dict(type='LoadAnnotations',\n with_bbox=True), dict(type='RandomFlip', flip_ratio=0.5), dict(type=\n 'AutoAugment', policies=[[dict(type='Resize', img_scale=[(480, 1024), (\n 512, 1024), (544, 1024), (576, 1024), (608, 1024), (640, 1024), (672, \n 1024), (704, 1024), (736, 1024), (768, 1024), (800, 1024)],\n multiscale_mode='value', keep_ratio=True)], [dict(type='Resize',\n img_scale=[(400, 1024), (500, 1024), (600, 1024)], multiscale_mode=\n 'value', keep_ratio=True), dict(type='RandomCrop', crop_type=\n 'absolute_range', crop_size=(384, 600), allow_negative_crop=True), dict\n (type='Resize', img_scale=[(480, 1024), (512, 1024), (544, 1024), (576,\n 1024), (608, 1024), (640, 1024), (672, 1024), (704, 1024), (736, 1024),\n (768, 1024), (800, 1024)], multiscale_mode='value', override=True,\n keep_ratio=True)]]), dict(type='Normalize', **img_norm_cfg), dict(type=\n 'Pad', size_divisor=32), dict(type='DefaultFormatBundle'), dict(type=\n 'Collect', keys=['img', 'gt_bboxes', 'gt_labels'])]\ndata = dict(train=dict(pipeline=train_pipeline), val=dict(pipeline=\n val_pipeline))\nevaluation = dict(interval=1, metric='bbox', save_best='bbox_mAP_50')\ncheckpoint_config = dict(interval=1)\nlog_config = dict(interval=50, hooks=[dict(type='TextLoggerHook')])\ncustom_hooks = [dict(type='NumClassCheckHook')]\ndist_params = dict(backend='nccl')\nlog_level = 'INFO'\nload_from = None\nresume_from = None\nworkflow = [('train', 1)]\noptimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999),\n weight_decay=0.05, paramwise_cfg=dict(custom_keys={'absolute_pos_embed':\n 
dict(decay_mult=0.0), 'relative_position_bias_table': dict(decay_mult=\n 0.0), 'norm': dict(decay_mult=0.0)}))\nlr_config = dict(warmup_iters=1000, step=[27, 33])\nrunner = dict(max_epochs=36)\n",
"step-3": "_base_ = [\n '../models/cascade_rcnn_r50_fpn.py',\n #'coco_instance.py',\n '../datasets/dataset.py',\n '../runtime/valid_search_wandb_runtime.py',\n '../schedules/schedule_1x.py'\n]\npretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa\nmodel = dict(\n type='CascadeRCNN',\n backbone=dict(\n _delete_=True,\n type='SwinTransformer',\n embed_dims=96,\n depths=[2, 2, 6, 2],\n num_heads=[3, 6, 12, 24],\n window_size=7,\n mlp_ratio=4,\n qkv_bias=True,\n qk_scale=None,\n drop_rate=0.,\n attn_drop_rate=0.,\n drop_path_rate=0.2,\n patch_norm=True,\n out_indices=(0, 1, 2, 3),\n with_cp=False,\n convert_weights=True,\n init_cfg=dict(type='Pretrained', checkpoint=pretrained)),\n neck=dict(in_channels=[96, 192, 384, 768])\n #[256, 512, 1024, 2048]\n)\nimg_norm_cfg = dict(\n mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)\n\n# augmentation strategy originates from DETR / Sparse RCNN\ntrain_pipeline = [\n dict(type='LoadImageFromFile'),\n dict(type='LoadAnnotations', with_bbox=True),\n dict(type='RandomFlip', flip_ratio=0.5),\n dict(\n type='AutoAugment',\n policies=[[\n dict(\n type='Resize',\n img_scale=[(480, 1024), (512, 1024), (544, 1024), (576, 1024),\n (608, 1024), (640, 1024), (672, 1024), (704, 1024),\n (736, 1024), (768, 1024), (800, 1024)],\n multiscale_mode='value',\n keep_ratio=True)\n ],\n [\n dict(\n type='Resize',\n img_scale=[(400, 1024), (500, 1024), (600, 1024)],\n multiscale_mode='value',\n keep_ratio=True),\n dict(\n type='RandomCrop',\n crop_type='absolute_range',\n crop_size=(384, 600),\n allow_negative_crop=True),\n dict(\n type='Resize',\n img_scale=[(480, 1024), (512, 1024), (544, 1024),\n (576, 1024), (608, 1024), (640, 1024),\n (672, 1024), (704, 1024), (736, 1024),\n (768, 1024), (800, 1024)],\n multiscale_mode='value',\n override=True,\n keep_ratio=True)\n ]]),\n dict(type='Normalize', **img_norm_cfg),\n dict(type='Pad', size_divisor=32),\n dict(type='DefaultFormatBundle'),\n dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),\n]\nval_pipeline = [\n dict(type='LoadImageFromFile'),\n dict(type='LoadAnnotations', with_bbox=True),\n dict(type='RandomFlip', flip_ratio=0.5),\n dict(\n type='AutoAugment',\n policies=[[\n dict(\n type='Resize',\n img_scale=[(480, 1024), (512, 1024), (544, 1024), (576, 1024),\n (608, 1024), (640, 1024), (672, 1024), (704, 1024),\n (736, 1024), (768, 1024), (800, 1024)],\n multiscale_mode='value',\n keep_ratio=True)\n ],\n [\n dict(\n type='Resize',\n img_scale=[(400, 1024), (500, 1024), (600, 1024)],\n multiscale_mode='value',\n keep_ratio=True),\n dict(\n type='RandomCrop',\n crop_type='absolute_range',\n crop_size=(384, 600),\n allow_negative_crop=True),\n dict(\n type='Resize',\n img_scale=[(480, 1024), (512, 1024), (544, 1024),\n (576, 1024), (608, 1024), (640, 1024),\n (672, 1024), (704, 1024), (736, 1024),\n (768, 1024), (800, 1024)],\n multiscale_mode='value',\n override=True,\n keep_ratio=True)\n ]]),\n dict(type='Normalize', **img_norm_cfg),\n dict(type='Pad', size_divisor=32),\n dict(type='DefaultFormatBundle'),\n dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),\n]\ndata = dict(train=dict(pipeline=train_pipeline),val=dict(pipeline=val_pipeline))\nevaluation = dict(interval=1, metric='bbox', save_best='bbox_mAP_50')\n\ncheckpoint_config = dict(interval=1)\n# yapf:disable\nlog_config = dict(\n interval=50,\n hooks=[\n dict(type='TextLoggerHook'),\n # dict(\n # type='WandbLoggerHook',\n # init_kwargs=dict(\n # 
project='valid_search',\n # name='YOUR_EXP'\n # ))\n ])\n# yapf:enable\ncustom_hooks = [dict(type='NumClassCheckHook')]\n\ndist_params = dict(backend='nccl')\nlog_level = 'INFO'\nload_from = None\nresume_from = None\nworkflow = [('train', 1)]\n\noptimizer = dict(\n _delete_=True,\n type='AdamW',\n lr=0.0001,\n betas=(0.9, 0.999),\n weight_decay=0.05,\n paramwise_cfg=dict(\n custom_keys={\n 'absolute_pos_embed': dict(decay_mult=0.),\n 'relative_position_bias_table': dict(decay_mult=0.),\n 'norm': dict(decay_mult=0.)\n }))\nlr_config = dict(warmup_iters=1000, step=[27, 33])\nrunner = dict(max_epochs=36)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import IECoreScene
class TestMotionPrimitive( unittest.TestCase ) :
def test( self ) :
m = IECoreScene.MotionPrimitive()
self.assertTrue( m.isInstanceOf( "MotionPrimitive" ) )
self.assertTrue( m.isInstanceOf( "VisibleRenderable" ) )
self.assertEqual( m.keys(), [] )
self.assertEqual( m.values(), [] )
self.assertEqual( len( m ), 0 )
self.assertRaises( Exception, m.__setitem__, "notAFloat", IECoreScene.PointsPrimitive( 1 ) )
m[0] = IECoreScene.PointsPrimitive( 1 )
self.assertEqual( len( m ), 1 )
self.assertEqual( m.keys(), [ 0 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )
m[1] = IECoreScene.PointsPrimitive( 1 )
self.assertEqual( len( m ), 2 )
self.assertEqual( m.keys(), [ 0, 1 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ), IECoreScene.PointsPrimitive( 1 ) ] )
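		# Round-trip the primitive through IndexedIO serialisation and a copy,
		# checking that equality is preserved in both cases.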
iface = IECore.IndexedIO.create( os.path.join( "test", "motionPrimitive.fio" ), IECore.IndexedIO.OpenMode.Write )
m.save( iface, "test" )
mm = IECore.Object.load( iface, "test" )
self.assertEqual( m, mm )
mmm = m.copy()
self.assertEqual( m, mmm )
del m[0]
self.assertEqual( len( m ), 1 )
self.assertEqual( m.keys(), [ 1 ] )
self.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )
del m[1]
self.assertEqual( m.keys(), [] )
self.assertEqual( m.values(), [] )
self.assertEqual( len( m ), 0 )
def testItems( self ) :
m = IECoreScene.MotionPrimitive()
m[0] = IECoreScene.PointsPrimitive( 1 )
m[1] = IECoreScene.PointsPrimitive( 2 )
self.assertEqual( m.items(), [ ( 0, IECoreScene.PointsPrimitive( 1 ) ), ( 1, IECoreScene.PointsPrimitive( 2 ) ) ] )
def testHash( self ) :
m = IECoreScene.MotionPrimitive()
m2 = IECoreScene.MotionPrimitive()
self.assertEqual( m.hash(), m2.hash() )
m[0] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
m2[0] = IECoreScene.SpherePrimitive()
self.assertEqual( m.hash(), m2.hash() )
m[1] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
m2[2] = IECoreScene.SpherePrimitive()
self.assertNotEqual( m.hash(), m2.hash() )
def tearDown( self ) :
if os.path.isfile( os.path.join( "test", "motionPrimitive.fio" ) ):
os.remove( os.path.join( "test", "motionPrimitive.fio" ) )
if __name__ == "__main__":
unittest.main()
|
normal
|
{
"blob_id": "d4c297af395581c6d955eb31a842ab86e599d23c",
"index": 4576,
"step-1": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n <mask token>\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import os\nimport unittest\nimport IECore\nimport IECoreScene\n\n\nclass TestMotionPrimitive(unittest.TestCase):\n\n def test(self):\n m = IECoreScene.MotionPrimitive()\n self.assertTrue(m.isInstanceOf('MotionPrimitive'))\n self.assertTrue(m.isInstanceOf('VisibleRenderable'))\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n self.assertRaises(Exception, m.__setitem__, 'notAFloat',\n IECoreScene.PointsPrimitive(1))\n m[0] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [0])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n m[1] = IECoreScene.PointsPrimitive(1)\n self.assertEqual(len(m), 2)\n self.assertEqual(m.keys(), [0, 1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1),\n IECoreScene.PointsPrimitive(1)])\n iface = IECore.IndexedIO.create(os.path.join('test',\n 'motionPrimitive.fio'), IECore.IndexedIO.OpenMode.Write)\n m.save(iface, 'test')\n mm = IECore.Object.load(iface, 'test')\n self.assertEqual(m, mm)\n mmm = m.copy()\n self.assertEqual(m, mmm)\n del m[0]\n self.assertEqual(len(m), 1)\n self.assertEqual(m.keys(), [1])\n self.assertEqual(m.values(), [IECoreScene.PointsPrimitive(1)])\n del m[1]\n self.assertEqual(m.keys(), [])\n self.assertEqual(m.values(), [])\n self.assertEqual(len(m), 0)\n\n def testItems(self):\n m = IECoreScene.MotionPrimitive()\n m[0] = IECoreScene.PointsPrimitive(1)\n m[1] = IECoreScene.PointsPrimitive(2)\n self.assertEqual(m.items(), [(0, IECoreScene.PointsPrimitive(1)), (\n 1, IECoreScene.PointsPrimitive(2))])\n\n def testHash(self):\n m = IECoreScene.MotionPrimitive()\n m2 = IECoreScene.MotionPrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[0] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[0] = IECoreScene.SpherePrimitive()\n self.assertEqual(m.hash(), m2.hash())\n m[1] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n m2[2] = IECoreScene.SpherePrimitive()\n self.assertNotEqual(m.hash(), m2.hash())\n\n def tearDown(self):\n if os.path.isfile(os.path.join('test', 'motionPrimitive.fio')):\n os.remove(os.path.join('test', 'motionPrimitive.fio'))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "##########################################################################\n#\n# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright\n# notice, this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n#\n# * Neither the name of Image Engine Design nor the names of any\n# other contributors to this software may be used to endorse or\n# promote products derived from this software without specific prior\n# written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\n# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,\n# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\n# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n#\n##########################################################################\n\nimport os\nimport unittest\n\nimport IECore\nimport IECoreScene\n\nclass TestMotionPrimitive( unittest.TestCase ) :\n\n\tdef test( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tself.assertTrue( m.isInstanceOf( \"MotionPrimitive\" ) )\n\t\tself.assertTrue( m.isInstanceOf( \"VisibleRenderable\" ) )\n\n\t\tself.assertEqual( m.keys(), [] )\n\t\tself.assertEqual( m.values(), [] )\n\t\tself.assertEqual( len( m ), 0 )\n\n\t\tself.assertRaises( Exception, m.__setitem__, \"notAFloat\", IECoreScene.PointsPrimitive( 1 ) )\n\n\t\tm[0] = IECoreScene.PointsPrimitive( 1 )\n\t\tself.assertEqual( len( m ), 1 )\n\t\tself.assertEqual( m.keys(), [ 0 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tm[1] = IECoreScene.PointsPrimitive( 1 )\n\t\tself.assertEqual( len( m ), 2 )\n\t\tself.assertEqual( m.keys(), [ 0, 1 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ), IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tiface = IECore.IndexedIO.create( os.path.join( \"test\", \"motionPrimitive.fio\" ), IECore.IndexedIO.OpenMode.Write )\n\t\tm.save( iface, \"test\" )\n\n\t\tmm = IECore.Object.load( iface, \"test\" )\n\t\tself.assertEqual( m, mm )\n\n\t\tmmm = m.copy()\n\t\tself.assertEqual( m, mmm )\n\n\t\tdel m[0]\n\t\tself.assertEqual( len( m ), 1 )\n\t\tself.assertEqual( m.keys(), [ 1 ] )\n\t\tself.assertEqual( m.values(), [ IECoreScene.PointsPrimitive( 1 ) ] )\n\n\t\tdel m[1]\n\t\tself.assertEqual( m.keys(), [] )\n\t\tself.assertEqual( m.values(), [] )\n\t\tself.assertEqual( len( m ), 0 )\n\n\tdef testItems( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tm[0] = IECoreScene.PointsPrimitive( 1 )\n\t\tm[1] = IECoreScene.PointsPrimitive( 2 )\n\t\tself.assertEqual( m.items(), [ ( 0, IECoreScene.PointsPrimitive( 
1 ) ), ( 1, IECoreScene.PointsPrimitive( 2 ) ) ] )\n\n\tdef testHash( self ) :\n\n\t\tm = IECoreScene.MotionPrimitive()\n\t\tm2 = IECoreScene.MotionPrimitive()\n\t\tself.assertEqual( m.hash(), m2.hash() )\n\n\t\tm[0] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\t\tm2[0] = IECoreScene.SpherePrimitive()\n\t\tself.assertEqual( m.hash(), m2.hash() )\n\n\t\tm[1] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\t\tm2[2] = IECoreScene.SpherePrimitive()\n\t\tself.assertNotEqual( m.hash(), m2.hash() )\n\n\tdef tearDown( self ) :\n\n\t\tif os.path.isfile( os.path.join( \"test\", \"motionPrimitive.fio\" ) ):\n\t\t\tos.remove( os.path.join( \"test\", \"motionPrimitive.fio\" ) )\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import os
import pathlib
import enum
import warnings
import colorama
import requests
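# Import invoke with DeprecationWarnings silenced; some versions emit them at import time.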
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
import invoke
class MoleculeDriver(enum.Enum):
docker = 1
lxd = 2
vagrant = 3
class TestPlatform(enum.Enum):
linux = 1
ubuntu = 2
centos = 3
def print_header(header_text):
print(
colorama.Fore.CYAN + colorama.Style.BRIGHT +
f" {header_text} ".center(80, "=") +
colorama.Style.RESET_ALL
)
def print_sub_header(sub_header_text):
print(
colorama.Fore.CYAN + colorama.Style.BRIGHT + "--" +
f" {sub_header_text} ".ljust(78, "-") +
colorama.Style.RESET_ALL
)
def print_success_message(success_message_text):
print(
colorama.Fore.GREEN + colorama.Style.BRIGHT +
f" {success_message_text}: Success ".center(80, "=") +
colorama.Style.RESET_ALL
)
def run_command(context, *args, **kwargs):
try:
return context.run(*args, **kwargs)
except invoke.exceptions.Failure:
print(
colorama.Fore.RED + colorama.Style.BRIGHT +
"Failure: error executing '" + args[0] + "' command" +
colorama.Style.RESET_ALL
)
raise
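# Build the absolute path to the shared molecule base config for the given driver/platform pair.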
def get_base_config_path(driver_code, platform_code):
base_config = "molecule/molecule_base_{driver}_{platform}.yml".format(
driver=driver_code.name, platform=platform_code.name
)
return str(pathlib.Path(__file__).resolve().parent / base_config)
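# Discover molecule scenarios: any directory under ./molecule that contains a molecule.yml.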
def get_molecule_scenarios(context):
scenarios = []
for child_obj in (pathlib.Path.cwd() / "molecule").iterdir():
if child_obj.is_dir():
if (child_obj / "molecule.yml").exists():
scenarios.append(child_obj.name)
return sorted(scenarios)
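# Run a molecule command for one scenario, choosing the base config and remote user from the driver.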
def run_molecule(context, command, scenario, driver, platform="linux", env={}):
driver_code = MoleculeDriver[driver.lower()]
platform_code = TestPlatform[platform.lower()]
molecule_env = env.copy()
if driver_code == MoleculeDriver.lxd:
molecule_env.update({"MOLECULE_USER_NAME": "root"})
elif driver_code == MoleculeDriver.vagrant:
molecule_env.update({"MOLECULE_USER_NAME": "vagrant"})
molecule_command = (
f"molecule --base-config {get_base_config_path(driver_code, platform_code)} {command}"
)
if scenario is not None:
molecule_command += f" -s {scenario}"
run_command(context, molecule_command, env=molecule_env, echo=True)
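# Resolve a test parameter: an explicit value wins, then the Ansible variable, then the default.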
def get_parameter_value(host, ansible_var_name, param_value, default_value):
if host.backend.HAS_RUN_ANSIBLE:
ansible_var_value = host.ansible.get_variables().get(ansible_var_name, None)
else:
ansible_var_value = None
return_value = ansible_var_value if param_value is None else param_value
if return_value is None:
return_value = default_value
return return_value
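# Query the GitHub API for release metadata, authenticating with AO_GITHUB_OAUTH_TOKEN when it is set.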
def get_github_release_info(release_url):
if "AO_GITHUB_OAUTH_TOKEN" in os.environ:
headers = {"Authorization": "token " + os.environ["AO_GITHUB_OAUTH_TOKEN"]}
else:
headers = None
return requests.get(
"https://api.github.com/repos/" + release_url, headers=headers
).json()
|
normal
|
{
"blob_id": "5bdc08b66916959d462314b8a6e5794e5fa12b55",
"index": 7986,
"step-1": "<mask token>\n\n\nclass MoleculeDriver(enum.Enum):\n docker = 1\n lxd = 2\n vagrant = 3\n\n\nclass TestPlatform(enum.Enum):\n linux = 1\n ubuntu = 2\n centos = 3\n\n\n<mask token>\n\n\ndef print_sub_header(sub_header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + '--' +\n f' {sub_header_text} '.ljust(78, '-') + colorama.Style.RESET_ALL)\n\n\ndef print_success_message(success_message_text):\n print(colorama.Fore.GREEN + colorama.Style.BRIGHT +\n f' {success_message_text}: Success '.center(80, '=') + colorama.\n Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef get_base_config_path(driver_code, platform_code):\n base_config = 'molecule/molecule_base_{driver}_{platform}.yml'.format(\n driver=driver_code.name, platform=platform_code.name)\n return str(pathlib.Path(__file__).resolve().parent / base_config)\n\n\ndef get_molecule_scenarios(context):\n scenarios = []\n for child_obj in (pathlib.Path.cwd() / 'molecule').iterdir():\n if child_obj.is_dir():\n if (child_obj / 'molecule.yml').exists():\n scenarios.append(child_obj.name)\n return sorted(scenarios)\n\n\n<mask token>\n\n\ndef get_parameter_value(host, ansible_var_name, param_value, default_value):\n if host.backend.HAS_RUN_ANSIBLE:\n ansible_var_value = host.ansible.get_variables().get(ansible_var_name,\n None)\n else:\n ansible_var_value = None\n return_value = ansible_var_value if param_value is None else param_value\n if return_value is None:\n return_value = default_value\n return return_value\n\n\ndef get_github_release_info(release_url):\n if 'AO_GITHUB_OAUTH_TOKEN' in os.environ:\n headers = {'Authorization': 'token ' + os.environ[\n 'AO_GITHUB_OAUTH_TOKEN']}\n else:\n headers = None\n return requests.get('https://api.github.com/repos/' + release_url,\n headers=headers).json()\n",
"step-2": "<mask token>\n\n\nclass MoleculeDriver(enum.Enum):\n docker = 1\n lxd = 2\n vagrant = 3\n\n\nclass TestPlatform(enum.Enum):\n linux = 1\n ubuntu = 2\n centos = 3\n\n\n<mask token>\n\n\ndef print_sub_header(sub_header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + '--' +\n f' {sub_header_text} '.ljust(78, '-') + colorama.Style.RESET_ALL)\n\n\ndef print_success_message(success_message_text):\n print(colorama.Fore.GREEN + colorama.Style.BRIGHT +\n f' {success_message_text}: Success '.center(80, '=') + colorama.\n Style.RESET_ALL)\n\n\n<mask token>\n\n\ndef get_base_config_path(driver_code, platform_code):\n base_config = 'molecule/molecule_base_{driver}_{platform}.yml'.format(\n driver=driver_code.name, platform=platform_code.name)\n return str(pathlib.Path(__file__).resolve().parent / base_config)\n\n\ndef get_molecule_scenarios(context):\n scenarios = []\n for child_obj in (pathlib.Path.cwd() / 'molecule').iterdir():\n if child_obj.is_dir():\n if (child_obj / 'molecule.yml').exists():\n scenarios.append(child_obj.name)\n return sorted(scenarios)\n\n\ndef run_molecule(context, command, scenario, driver, platform='linux', env={}):\n driver_code = MoleculeDriver[driver.lower()]\n platform_code = TestPlatform[platform.lower()]\n molecule_env = env.copy()\n if driver_code == MoleculeDriver.lxd:\n molecule_env.update({'MOLECULE_USER_NAME': 'root'})\n elif driver_code == MoleculeDriver.vagrant:\n molecule_env.update({'MOLECULE_USER_NAME': 'vagrant'})\n molecule_command = (\n f'molecule --base-config {get_base_config_path(driver_code, platform_code)} {command}'\n )\n if scenario is not None:\n molecule_command += f' -s {scenario}'\n run_command(context, molecule_command, env=molecule_env, echo=True)\n\n\ndef get_parameter_value(host, ansible_var_name, param_value, default_value):\n if host.backend.HAS_RUN_ANSIBLE:\n ansible_var_value = host.ansible.get_variables().get(ansible_var_name,\n None)\n else:\n ansible_var_value = None\n return_value = ansible_var_value if param_value is None else param_value\n if return_value is None:\n return_value = default_value\n return return_value\n\n\ndef get_github_release_info(release_url):\n if 'AO_GITHUB_OAUTH_TOKEN' in os.environ:\n headers = {'Authorization': 'token ' + os.environ[\n 'AO_GITHUB_OAUTH_TOKEN']}\n else:\n headers = None\n return requests.get('https://api.github.com/repos/' + release_url,\n headers=headers).json()\n",
"step-3": "<mask token>\n\n\nclass MoleculeDriver(enum.Enum):\n docker = 1\n lxd = 2\n vagrant = 3\n\n\nclass TestPlatform(enum.Enum):\n linux = 1\n ubuntu = 2\n centos = 3\n\n\ndef print_header(header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + f' {header_text} '.\n center(80, '=') + colorama.Style.RESET_ALL)\n\n\ndef print_sub_header(sub_header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + '--' +\n f' {sub_header_text} '.ljust(78, '-') + colorama.Style.RESET_ALL)\n\n\ndef print_success_message(success_message_text):\n print(colorama.Fore.GREEN + colorama.Style.BRIGHT +\n f' {success_message_text}: Success '.center(80, '=') + colorama.\n Style.RESET_ALL)\n\n\ndef run_command(context, *args, **kwargs):\n try:\n return context.run(*args, **kwargs)\n except invoke.exceptions.Failure:\n print(colorama.Fore.RED + colorama.Style.BRIGHT +\n \"Failure: error executing '\" + args[0] + \"' command\" + colorama\n .Style.RESET_ALL)\n raise\n\n\ndef get_base_config_path(driver_code, platform_code):\n base_config = 'molecule/molecule_base_{driver}_{platform}.yml'.format(\n driver=driver_code.name, platform=platform_code.name)\n return str(pathlib.Path(__file__).resolve().parent / base_config)\n\n\ndef get_molecule_scenarios(context):\n scenarios = []\n for child_obj in (pathlib.Path.cwd() / 'molecule').iterdir():\n if child_obj.is_dir():\n if (child_obj / 'molecule.yml').exists():\n scenarios.append(child_obj.name)\n return sorted(scenarios)\n\n\ndef run_molecule(context, command, scenario, driver, platform='linux', env={}):\n driver_code = MoleculeDriver[driver.lower()]\n platform_code = TestPlatform[platform.lower()]\n molecule_env = env.copy()\n if driver_code == MoleculeDriver.lxd:\n molecule_env.update({'MOLECULE_USER_NAME': 'root'})\n elif driver_code == MoleculeDriver.vagrant:\n molecule_env.update({'MOLECULE_USER_NAME': 'vagrant'})\n molecule_command = (\n f'molecule --base-config {get_base_config_path(driver_code, platform_code)} {command}'\n )\n if scenario is not None:\n molecule_command += f' -s {scenario}'\n run_command(context, molecule_command, env=molecule_env, echo=True)\n\n\ndef get_parameter_value(host, ansible_var_name, param_value, default_value):\n if host.backend.HAS_RUN_ANSIBLE:\n ansible_var_value = host.ansible.get_variables().get(ansible_var_name,\n None)\n else:\n ansible_var_value = None\n return_value = ansible_var_value if param_value is None else param_value\n if return_value is None:\n return_value = default_value\n return return_value\n\n\ndef get_github_release_info(release_url):\n if 'AO_GITHUB_OAUTH_TOKEN' in os.environ:\n headers = {'Authorization': 'token ' + os.environ[\n 'AO_GITHUB_OAUTH_TOKEN']}\n else:\n headers = None\n return requests.get('https://api.github.com/repos/' + release_url,\n headers=headers).json()\n",
"step-4": "<mask token>\nwith warnings.catch_warnings():\n warnings.filterwarnings('ignore', category=DeprecationWarning)\n import invoke\n\n\nclass MoleculeDriver(enum.Enum):\n docker = 1\n lxd = 2\n vagrant = 3\n\n\nclass TestPlatform(enum.Enum):\n linux = 1\n ubuntu = 2\n centos = 3\n\n\ndef print_header(header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + f' {header_text} '.\n center(80, '=') + colorama.Style.RESET_ALL)\n\n\ndef print_sub_header(sub_header_text):\n print(colorama.Fore.CYAN + colorama.Style.BRIGHT + '--' +\n f' {sub_header_text} '.ljust(78, '-') + colorama.Style.RESET_ALL)\n\n\ndef print_success_message(success_message_text):\n print(colorama.Fore.GREEN + colorama.Style.BRIGHT +\n f' {success_message_text}: Success '.center(80, '=') + colorama.\n Style.RESET_ALL)\n\n\ndef run_command(context, *args, **kwargs):\n try:\n return context.run(*args, **kwargs)\n except invoke.exceptions.Failure:\n print(colorama.Fore.RED + colorama.Style.BRIGHT +\n \"Failure: error executing '\" + args[0] + \"' command\" + colorama\n .Style.RESET_ALL)\n raise\n\n\ndef get_base_config_path(driver_code, platform_code):\n base_config = 'molecule/molecule_base_{driver}_{platform}.yml'.format(\n driver=driver_code.name, platform=platform_code.name)\n return str(pathlib.Path(__file__).resolve().parent / base_config)\n\n\ndef get_molecule_scenarios(context):\n scenarios = []\n for child_obj in (pathlib.Path.cwd() / 'molecule').iterdir():\n if child_obj.is_dir():\n if (child_obj / 'molecule.yml').exists():\n scenarios.append(child_obj.name)\n return sorted(scenarios)\n\n\ndef run_molecule(context, command, scenario, driver, platform='linux', env={}):\n driver_code = MoleculeDriver[driver.lower()]\n platform_code = TestPlatform[platform.lower()]\n molecule_env = env.copy()\n if driver_code == MoleculeDriver.lxd:\n molecule_env.update({'MOLECULE_USER_NAME': 'root'})\n elif driver_code == MoleculeDriver.vagrant:\n molecule_env.update({'MOLECULE_USER_NAME': 'vagrant'})\n molecule_command = (\n f'molecule --base-config {get_base_config_path(driver_code, platform_code)} {command}'\n )\n if scenario is not None:\n molecule_command += f' -s {scenario}'\n run_command(context, molecule_command, env=molecule_env, echo=True)\n\n\ndef get_parameter_value(host, ansible_var_name, param_value, default_value):\n if host.backend.HAS_RUN_ANSIBLE:\n ansible_var_value = host.ansible.get_variables().get(ansible_var_name,\n None)\n else:\n ansible_var_value = None\n return_value = ansible_var_value if param_value is None else param_value\n if return_value is None:\n return_value = default_value\n return return_value\n\n\ndef get_github_release_info(release_url):\n if 'AO_GITHUB_OAUTH_TOKEN' in os.environ:\n headers = {'Authorization': 'token ' + os.environ[\n 'AO_GITHUB_OAUTH_TOKEN']}\n else:\n headers = None\n return requests.get('https://api.github.com/repos/' + release_url,\n headers=headers).json()\n",
"step-5": "import os\nimport pathlib\nimport enum\nimport warnings\nimport colorama\nimport requests\nwith warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n import invoke\n\nclass MoleculeDriver(enum.Enum):\n docker = 1\n lxd = 2\n vagrant = 3\n\nclass TestPlatform(enum.Enum):\n linux = 1\n ubuntu = 2\n centos = 3\n\ndef print_header(header_text):\n print(\n colorama.Fore.CYAN + colorama.Style.BRIGHT +\n f\" {header_text} \".center(80, \"=\") +\n colorama.Style.RESET_ALL\n )\n\n\ndef print_sub_header(sub_header_text):\n print(\n colorama.Fore.CYAN + colorama.Style.BRIGHT + \"--\" +\n f\" {sub_header_text} \".ljust(78, \"-\") +\n colorama.Style.RESET_ALL\n )\n\n\ndef print_success_message(success_message_text):\n print(\n colorama.Fore.GREEN + colorama.Style.BRIGHT +\n f\" {success_message_text}: Success \".center(80, \"=\") +\n colorama.Style.RESET_ALL\n )\n\n\ndef run_command(context, *args, **kwargs):\n try:\n return context.run(*args, **kwargs)\n except invoke.exceptions.Failure:\n print(\n colorama.Fore.RED + colorama.Style.BRIGHT +\n \"Failure: error executing '\" + args[0] + \"' command\" +\n colorama.Style.RESET_ALL\n )\n raise\n\ndef get_base_config_path(driver_code, platform_code):\n base_config = \"molecule/molecule_base_{driver}_{platform}.yml\".format(\n driver=driver_code.name, platform=platform_code.name\n )\n return str(pathlib.Path(__file__).resolve().parent / base_config)\n\ndef get_molecule_scenarios(context):\n scenarios = []\n for child_obj in (pathlib.Path.cwd() / \"molecule\").iterdir():\n if child_obj.is_dir():\n if (child_obj / \"molecule.yml\").exists():\n scenarios.append(child_obj.name)\n return sorted(scenarios)\n\n\ndef run_molecule(context, command, scenario, driver, platform=\"linux\", env={}):\n driver_code = MoleculeDriver[driver.lower()]\n platform_code = TestPlatform[platform.lower()]\n molecule_env = env.copy()\n if driver_code == MoleculeDriver.lxd:\n molecule_env.update({\"MOLECULE_USER_NAME\": \"root\"})\n elif driver_code == MoleculeDriver.vagrant:\n molecule_env.update({\"MOLECULE_USER_NAME\": \"vagrant\"})\n molecule_command = (\n f\"molecule --base-config {get_base_config_path(driver_code, platform_code)} {command}\"\n )\n if scenario is not None:\n molecule_command += f\" -s {scenario}\"\n run_command(context, molecule_command, env=molecule_env, echo=True)\n\ndef get_parameter_value(host, ansible_var_name, param_value, default_value):\n if host.backend.HAS_RUN_ANSIBLE:\n ansible_var_value = host.ansible.get_variables().get(ansible_var_name, None)\n else:\n ansible_var_value = None\n return_value = ansible_var_value if param_value is None else param_value\n if return_value is None:\n return_value = default_value\n return return_value\n\ndef get_github_release_info(release_url):\n if \"AO_GITHUB_OAUTH_TOKEN\" in os.environ:\n headers = {\"Authorization\": \"token \" + os.environ[\"AO_GITHUB_OAUTH_TOKEN\"]}\n else:\n headers = None\n return requests.get(\n \"https://api.github.com/repos/\" + release_url, headers=headers\n ).json()\n",
"step-ids": [
10,
11,
13,
14,
16
]
}
|
[
10,
11,
13,
14,
16
] |
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# In[2]:
import os
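# Directory containing the grammatical facial expression CSV files (machine-specific path).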
GFE_PATH = "C:\Haely\MS2017\sem2\EE 259\Project\grammatical_facial_expression"
def load_a_affirm_data(gfe_path=GFE_PATH):
csv_patha = os.path.join(gfe_path, "a_affirmative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patha)
def load_a_affirm_target(gfe_path=GFE_PATH):
csv_targeta = os.path.join(gfe_path, "a_affirmative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targeta)
def load_a_cond_data(gfe_path=GFE_PATH):
csv_pathc = os.path.join(gfe_path, "a_conditional_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathc)
def load_a_cond_target(gfe_path=GFE_PATH):
csv_targetc = os.path.join(gfe_path, "a_conditional_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetc)
def load_a_doubtq_data(gfe_path=GFE_PATH):
csv_pathd = os.path.join(gfe_path, "a_doubt_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathd)
def load_a_doubtq_target(gfe_path=GFE_PATH):
csv_targetd = os.path.join(gfe_path, "a_doubts_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetd)
def load_a_emphasis_data(gfe_path=GFE_PATH):
csv_pathe = os.path.join(gfe_path, "a_emphasis_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathe)
def load_a_emphasis_target(gfe_path=GFE_PATH):
csv_targete = os.path.join(gfe_path, "a_emphasis_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targete)
def load_a_neg_data(gfe_path=GFE_PATH):
csv_pathn = os.path.join(gfe_path, "a_negative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathn)
def load_a_neg_target(gfe_path=GFE_PATH):
csv_targetn = os.path.join(gfe_path, "a_negative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetn)
def load_a_rel_data(gfe_path=GFE_PATH):
csv_pathr = os.path.join(gfe_path, "a_relative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathr)
def load_a_rel_target(gfe_path=GFE_PATH):
csv_targetr = os.path.join(gfe_path, "a_relative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetr)
def load_a_topics_data(gfe_path=GFE_PATH):
csv_patht = os.path.join(gfe_path, "a_topics_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patht)
def load_a_topics_target(gfe_path=GFE_PATH):
csv_targett = os.path.join(gfe_path, "a_topics_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targett)
def load_a_wh_data(gfe_path=GFE_PATH):
csv_pathw = os.path.join(gfe_path, "a_wh_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathw)
def load_a_wh_target(gfe_path=GFE_PATH):
csv_targetw = os.path.join(gfe_path, "a_wh_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetw)
def load_a_yn_data(gfe_path=GFE_PATH):
csv_pathy = os.path.join(gfe_path, "a_yn_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathy)
def load_a_yn_target(gfe_path=GFE_PATH):
csv_targety = os.path.join(gfe_path, "a_yn_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targety)
# In[3]:
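# Equivalent loaders for user "b"'s recordings.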
def load_b_affirm_data(gfe_path=GFE_PATH):
csv_pathab = os.path.join(gfe_path, "b_affirmative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathab)
def load_b_affirm_target(gfe_path=GFE_PATH):
csv_targetab = os.path.join(gfe_path, "b_affirmative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetab)
def load_b_cond_data(gfe_path=GFE_PATH):
csv_pathcb = os.path.join(gfe_path, "b_conditional_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathcb)
def load_b_cond_target(gfe_path=GFE_PATH):
csv_targetcb = os.path.join(gfe_path, "b_conditional_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetcb)
def load_b_doubtq_data(gfe_path=GFE_PATH):
csv_pathdb = os.path.join(gfe_path, "b_doubt_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathdb)
def load_b_doubtq_target(gfe_path=GFE_PATH):
csv_targetdb = os.path.join(gfe_path, "b_doubt_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetdb)
def load_b_emphasis_data(gfe_path=GFE_PATH):
csv_patheb = os.path.join(gfe_path, "b_emphasis_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patheb)
def load_b_emphasis_target(gfe_path=GFE_PATH):
csv_targeteb = os.path.join(gfe_path, "b_emphasis_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targeteb)
def load_b_neg_data(gfe_path=GFE_PATH):
csv_pathnb = os.path.join(gfe_path, "b_negative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathnb)
def load_b_neg_target(gfe_path=GFE_PATH):
csv_targetnb = os.path.join(gfe_path, "b_negative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetnb)
def load_b_rel_data(gfe_path=GFE_PATH):
csv_pathrb = os.path.join(gfe_path, "b_relative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathrb)
def load_b_rel_target(gfe_path=GFE_PATH):
csv_targetrb = os.path.join(gfe_path, "b_relative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetrb)
def load_b_topics_data(gfe_path=GFE_PATH):
csv_pathtb = os.path.join(gfe_path, "b_topics_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathtb)
def load_b_topics_target(gfe_path=GFE_PATH):
csv_targettb = os.path.join(gfe_path, "b_topics_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targettb)
def load_b_wh_data(gfe_path=GFE_PATH):
csv_pathwb = os.path.join(gfe_path, "b_wh_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathwb)
def load_b_wh_target(gfe_path=GFE_PATH):
csv_targetwb = os.path.join(gfe_path, "b_wh_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetwb)
def load_b_yn_data(gfe_path=GFE_PATH):
csv_pathyb = os.path.join(gfe_path, "b_yn_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathyb)
def load_b_yn_target(gfe_path=GFE_PATH):
csv_targetyb = os.path.join(gfe_path, "b_yn_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetyb)
# In[4]:
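# Load every expression's datapoints and targets for user A.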
affirmda = load_a_affirm_data()
affirmta = load_a_affirm_target()
condda = load_a_cond_data()
condta = load_a_cond_target()
doubtqda = load_a_doubtq_data()
doubtqta = load_a_doubtq_target()
emphda = load_a_emphasis_data()
emphta = load_a_emphasis_target()
negda = load_a_neg_data()
negta = load_a_neg_target()
relda = load_a_rel_data()
relta = load_a_rel_target()
topicsda = load_a_topics_data()
topicsta = load_a_topics_target()
whda = load_a_wh_data()
whta = load_a_wh_target()
ynda = load_a_yn_data()
ynta = load_a_yn_target()
# In[5]:
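# Load every expression's datapoints and targets for user B.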
affirmdb = load_b_affirm_data()
affirmtb = load_b_affirm_target()
conddb = load_b_cond_data()
condtb = load_b_cond_target()
doubtqdb = load_b_doubtq_data()
doubtqtb = load_b_doubtq_target()
emphdb = load_b_emphasis_data()
emphtb = load_b_emphasis_target()
negdb = load_b_neg_data()
negtb = load_b_neg_target()
reldb = load_b_rel_data()
reltb = load_b_rel_target()
topicsdb = load_b_topics_data()
topicstb = load_b_topics_target()
whdb = load_b_wh_data()
whtb = load_b_wh_target()
yndb = load_b_yn_data()
yntb = load_b_yn_target()
# In[8]:
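# Combine user A and user B recordings per expression (rows stacked, index reset).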
users_combine_affirmd = pd.concat([affirmda, affirmdb],ignore_index=True)
affirm_y = pd.concat([affirmta,affirmtb],ignore_index=True)
users_combine_condd = pd.concat([condda, conddb],ignore_index=True)
cond_y = pd.concat([condta, condtb],ignore_index=True)
users_combine_doubtqd = pd.concat([doubtqda, doubtqdb],ignore_index=True)
doubtq_y = pd.concat([doubtqta, doubtqtb],ignore_index=True)
users_combine_emphd = pd.concat([emphda, emphdb],ignore_index=True)
emph_y = pd.concat([emphta, emphtb],ignore_index=True)
users_combine_negd = pd.concat([negda, negdb],ignore_index=True)
neg_y = pd.concat([negta, negtb],ignore_index=True)
users_combine_reld = pd.concat([relda, reldb],ignore_index=True)
rel_y = pd.concat([relta, reltb],ignore_index=True)
users_combine_topicsd = pd.concat([topicsda, topicsdb],ignore_index=True)
topics_y = pd.concat([topicsta, topicstb],ignore_index=True)
users_combine_whd = pd.concat([whda, whdb],ignore_index=True)
wh_y = pd.concat([whta, whtb],ignore_index=True)
users_combine_ynd = pd.concat([ynda, yndb],ignore_index=True)
yn_y = pd.concat([ynta, yntb],ignore_index=True)
# In[11]:
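# Cells 11-19 attach each target series as a new column on its combined frame.
# drop([10]) below returns a copy (no inplace), so the target series themselves are left unchanged.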
users_combine_affirmd['affirm_y']=affirm_y
affirm_y.drop([10])
# In[12]:
users_combine_condd['cond_y']=cond_y
cond_y.drop([10])
# In[13]:
users_combine_doubtqd['doubtq_y']=doubtq_y
doubtq_y.drop([10])
# In[14]:
users_combine_emphd['emph_y']=emph_y
emph_y.drop([10])
# In[15]:
users_combine_negd['neg_y']=neg_y
neg_y.drop([10])
# In[16]:
users_combine_reld['rel_y']=rel_y
rel_y.drop([10])
# In[17]:
users_combine_topicsd['topics_y']=topics_y
topics_y.drop([10])
# In[18]:
users_combine_whd['wh_y']=wh_y
wh_y.drop([10])
# In[19]:
users_combine_ynd['yn_y']=yn_y
yn_y.drop([10])
# In[22]:
from sklearn.model_selection import train_test_split
# Stratified train/test split per expression.
# Features use iloc[:, 1:-1] to drop the first column and the appended target column;
# keeping the target inside X would leak the label into the features.
ya = users_combine_affirmd['affirm_y']
Xa_train, Xa_test, ya_train, ya_test = train_test_split(users_combine_affirmd.iloc[:, 1:-1], ya, stratify=ya)
yc = users_combine_condd['cond_y']
Xc_train, Xc_test, yc_train, yc_test = train_test_split(users_combine_condd.iloc[:, 1:-1], yc, stratify=yc)
yd = users_combine_doubtqd['doubtq_y']
Xd_train, Xd_test, yd_train, yd_test = train_test_split(users_combine_doubtqd.iloc[:, 1:-1], yd, stratify=yd)
ye = users_combine_emphd['emph_y']
Xe_train, Xe_test, ye_train, ye_test = train_test_split(users_combine_emphd.iloc[:, 1:-1], ye, stratify=ye)
yn = users_combine_negd['neg_y']
Xn_train, Xn_test, yn_train, yn_test = train_test_split(users_combine_negd.iloc[:, 1:-1], yn, stratify=yn)
yr = users_combine_reld['rel_y']
Xr_train, Xr_test, yr_train, yr_test = train_test_split(users_combine_reld.iloc[:, 1:-1], yr, stratify=yr)
yt = users_combine_topicsd['topics_y']
Xt_train, Xt_test, yt_train, yt_test = train_test_split(users_combine_topicsd.iloc[:, 1:-1], yt, stratify=yt)
yw = users_combine_whd['wh_y']
Xw_train, Xw_test, yw_train, yw_test = train_test_split(users_combine_whd.iloc[:, 1:-1], yw, stratify=yw)
yy = users_combine_ynd['yn_y']
Xy_train, Xy_test, yy_train, yy_test = train_test_split(users_combine_ynd.iloc[:, 1:-1], yy, stratify=yy)
# In[25]:
from sklearn.preprocessing import scale
from scipy import stats
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
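# Least-squares LDA with stored covariance; the same estimator is refit once per expression below.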
lda_clf = LDA(solver='lsqr',store_covariance=True)
lda_clf.fit(Xa_train,ya_train)
ya_predicted = lda_clf.predict(Xa_test)
print('\n The error rate of the LDA model for affirm is {0:.2f}% '.format(100*np.mean(ya_predicted!=ya_test)))
lda_clf.fit(Xc_train,yc_train)
yc_predicted = lda_clf.predict(Xc_test)
print('\n The error rate of the LDA model for conditional is {0:.2f}% '.format(100*np.mean(yc_predicted!=yc_test)))
lda_clf.fit(Xd_train,yd_train)
yd_predicted = lda_clf.predict(Xd_test)
print('\n The error rate of the LDA model for doubt questions is {0:.2f}% '.format(100*np.mean(yd_predicted!=yd_test)))
lda_clf.fit(Xe_train,ye_train)
ye_predicted = lda_clf.predict(Xe_test)
print('\n The error rate of the LDA model for emphasis is {0:.2f}% '.format(100*np.mean(ye_predicted!=ye_test)))
lda_clf.fit(Xn_train,yn_train)
yn_predicted = lda_clf.predict(Xn_test)
print('\n The error rate of the LDA model for negative is {0:.2f}% '.format(100*np.mean(yn_predicted!=yn_test)))
lda_clf.fit(Xr_train,yr_train)
yr_predicted = lda_clf.predict(Xr_test)
print('\n The error rate of the LDA model for relative is {0:.2f}% '.format(100*np.mean(yr_predicted!=yr_test)))
lda_clf.fit(Xt_train,yt_train)
yt_predicted = lda_clf.predict(Xt_test)
print('\n The error rate of the LDA model for topics is {0:.2f}% '.format(100*np.mean(yt_predicted!=yt_test)))
lda_clf.fit(Xw_train,yw_train)
yw_predicted = lda_clf.predict(Xw_test)
print('\n The error rate of the LDA model for wh questions is {0:.2f}% '.format(100*np.mean(yw_predicted!=yw_test)))
lda_clf.fit(Xy_train,yy_train)
yy_predicted = lda_clf.predict(Xy_test)
print('\n The error rate of the LDA model for yes or no is {0:.2f}% '.format(100*np.mean(yy_predicted!=yy_test)))
|
normal
|
{
"blob_id": "2fb8bce3a64787dbaf5a3bb3da53f70005048467",
"index": 4104,
"step-1": "<mask token>\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\n<mask token>\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\n<mask token>\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\n<mask token>\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return 
pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\n<mask token>\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, 'a_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathd)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n 
return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, 'b_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, 'a_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathd)\n\n\n<mask token>\n\n\ndef load_a_emphasis_data(gfe_path=GFE_PATH):\n csv_pathe = os.path.join(gfe_path, 'a_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathe)\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n 
return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, 'b_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-5": "\n# coding: utf-8\n\n# In[1]:\n\n\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\n# In[2]:\n\n\nimport os\nGFE_PATH = \"C:\\Haely\\MS2017\\sem2\\EE 259\\Project\\grammatical_facial_expression\"\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, \"a_affirmative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patha)\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, \"a_affirmative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, \"a_conditional_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathc)\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, \"a_conditional_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, \"a_doubt_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathd)\ndef load_a_doubtq_target(gfe_path=GFE_PATH):\n csv_targetd = os.path.join(gfe_path, \"a_doubts_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetd)\n\ndef load_a_emphasis_data(gfe_path=GFE_PATH):\n csv_pathe = os.path.join(gfe_path, \"a_emphasis_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathe)\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, \"a_emphasis_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, \"a_negative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathn)\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, \"a_negative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, \"a_relative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathr)\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, \"a_relative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, \"a_topics_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patht)\ndef load_a_topics_target(gfe_path=GFE_PATH):\n csv_targett = os.path.join(gfe_path, \"a_topics_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targett)\n\ndef load_a_wh_data(gfe_path=GFE_PATH):\n csv_pathw = os.path.join(gfe_path, \"a_wh_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathw)\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, \"a_wh_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, \"a_yn_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathy)\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, \"a_yn_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\n# In[3]:\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, \"b_affirmative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathab)\ndef load_b_affirm_target(gfe_path=GFE_PATH):\n csv_targetab = os.path.join(gfe_path, \"b_affirmative_targets.csv\")\n print(gfe_path)\n return 
pd.read_csv(csv_targetab)\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, \"b_conditional_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, \"b_conditional_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, \"b_doubt_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, \"b_doubt_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, \"b_emphasis_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patheb)\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, \"b_emphasis_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, \"b_negative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, \"b_negative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, \"b_relative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, \"b_relative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, \"b_topics_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\ndef load_b_topics_target(gfe_path=GFE_PATH):\n csv_targettb = os.path.join(gfe_path, \"b_topics_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targettb)\n\ndef load_b_wh_data(gfe_path=GFE_PATH):\n csv_pathwb = os.path.join(gfe_path, \"b_wh_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathwb)\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, \"b_wh_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, \"b_yn_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, \"b_yn_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n# In[4]:\n\n\naffirmda = load_a_affirm_data()\naffirmta = load_a_affirm_target()\n\ncondda = load_a_cond_data()\ncondta = load_a_cond_target()\n\ndoubtqda = load_a_doubtq_data()\ndoubtqta = load_a_doubtq_target()\n\nemphda = load_a_emphasis_data()\nemphta = load_a_emphasis_target()\n\nnegda = load_a_neg_data()\nnegta = load_a_neg_target()\n\nrelda = load_a_rel_data()\nrelta = load_a_rel_target()\n\ntopicsda = load_a_topics_data()\ntopicsta = load_a_topics_target()\n\nwhda = load_a_wh_data()\nwhta = load_a_wh_target()\n\nynda = load_a_yn_data()\nynta = load_a_yn_target()\n\n\n# In[5]:\n\n\naffirmdb = load_b_affirm_data()\naffirmtb = load_b_affirm_target()\n\nconddb = load_b_cond_data()\ncondtb = load_b_cond_target()\n\ndoubtqdb = load_b_doubtq_data()\ndoubtqtb = load_b_doubtq_target()\n\nemphdb = load_b_emphasis_data()\nemphtb = 
load_b_emphasis_target()\n\nnegdb = load_b_neg_data()\nnegtb = load_b_neg_target()\n\nreldb = load_b_rel_data()\nreltb = load_b_rel_target()\n\ntopicsdb = load_b_topics_data()\ntopicstb = load_b_topics_target()\n\nwhdb = load_b_wh_data()\nwhtb = load_b_wh_target()\n\nyndb = load_b_yn_data()\nyntb = load_b_yn_target()\n\n\n# In[8]:\n\n\nusers_combine_affirmd = pd.concat([affirmda, affirmdb],ignore_index=True)\naffirm_y = pd.concat([affirmta,affirmtb],ignore_index=True)\n\nusers_combine_condd = pd.concat([condda, conddb],ignore_index=True)\ncond_y = pd.concat([condta, condtb],ignore_index=True)\n\nusers_combine_doubtqd = pd.concat([doubtqda, doubtqdb],ignore_index=True)\ndoubtq_y = pd.concat([doubtqta, doubtqtb],ignore_index=True)\n\nusers_combine_emphd = pd.concat([emphda, emphdb],ignore_index=True)\nemph_y = pd.concat([emphta, emphtb],ignore_index=True)\n\nusers_combine_negd = pd.concat([negda, negdb],ignore_index=True)\nneg_y = pd.concat([negta, negtb],ignore_index=True)\n\nusers_combine_reld = pd.concat([relda, reldb],ignore_index=True)\nrel_y = pd.concat([relta, reltb],ignore_index=True)\n\nusers_combine_topicsd = pd.concat([topicsda, topicsdb],ignore_index=True)\ntopics_y = pd.concat([topicsta, topicstb],ignore_index=True)\n\nusers_combine_whd = pd.concat([whda, whdb],ignore_index=True)\nwh_y = pd.concat([whta, whtb],ignore_index=True)\n\nusers_combine_ynd = pd.concat([ynda, yndb],ignore_index=True)\nyn_y = pd.concat([ynta, yntb],ignore_index=True)\n\n\n# In[11]:\n\n\nusers_combine_affirmd['affirm_y']=affirm_y\naffirm_y.drop([10]) \n\n\n\n# In[12]:\n\n\nusers_combine_condd['cond_y']=cond_y\ncond_y.drop([10]) \n\n\n# In[13]:\n\n\nusers_combine_doubtqd['doubtq_y']=doubtq_y\ndoubtq_y.drop([10]) \n\n\n# In[14]:\n\n\nusers_combine_emphd['emph_y']=emph_y\nemph_y.drop([10]) \n\n\n# In[15]:\n\n\nusers_combine_negd['neg_y']=neg_y\nneg_y.drop([10]) \n\n\n# In[16]:\n\n\nusers_combine_reld['rel_y']=rel_y\nrel_y.drop([10]) \n\n\n# In[17]:\n\n\nusers_combine_topicsd['topics_y']=topics_y\ntopics_y.drop([10]) \n\n\n# In[18]:\n\n\nusers_combine_whd['wh_y']=wh_y\nwh_y.drop([10]) \n\n\n# In[19]:\n\n\nusers_combine_ynd['yn_y']=yn_y\nyn_y.drop([10]) \n\n\n# In[22]:\n\n\nfrom sklearn.model_selection import train_test_split\nya=users_combine_affirmd['affirm_y']\nXa_train,Xa_test,ya_train,ya_test = train_test_split(users_combine_affirmd.iloc[:,1:],ya,stratify=ya)\n\nyc=users_combine_condd['cond_y']\nXc_train,Xc_test,yc_train,yc_test = train_test_split(users_combine_condd.iloc[:,1:],yc,stratify=yc)\n\nyd=users_combine_doubtqd['doubtq_y']\nXd_train,Xd_test,yd_train,yd_test = train_test_split(users_combine_doubtqd.iloc[:,1:],yd,stratify=yd)\n\nye=users_combine_emphd['emph_y']\nXe_train,Xe_test,ye_train,ye_test = train_test_split(users_combine_emphd.iloc[:,1:],ye,stratify=ye)\n\nyn=users_combine_negd['neg_y']\nXn_train,Xn_test,yn_train,yn_test = train_test_split(users_combine_negd.iloc[:,1:],yn,stratify=yn)\n\nyr=users_combine_reld['rel_y']\nXr_train,Xr_test,yr_train,yr_test = train_test_split(users_combine_reld.iloc[:,1:],yr,stratify=yr)\n\nyt=users_combine_topicsd['topics_y']\nXt_train,Xt_test,yt_train,yt_test = train_test_split(users_combine_topicsd.iloc[:,1:],yt,stratify=yt)\n\nyw=users_combine_whd['wh_y']\nXw_train,Xw_test,yw_train,yw_test = train_test_split(users_combine_whd.iloc[:,1:],yw,stratify=yw)\n\nyy=users_combine_ynd['yn_y']\nXy_train,Xy_test,yy_train,yy_test = train_test_split(users_combine_ynd.iloc[:,1:],yy,stratify=yy)\n\n\n\n# In[25]:\n\n\nfrom sklearn.preprocessing import scale\nfrom scipy 
import stats\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\nlda_clf = LDA(solver='lsqr',store_covariance=True)\n\nlda_clf.fit(Xa_train,ya_train)\nya_predicted = lda_clf.predict(Xa_test)\nprint('\\n The error rate of the LDA model for affirm is {0:.2f}% '.format(100*np.mean(ya_predicted!=ya_test)))\n\nlda_clf.fit(Xc_train,yc_train)\nyc_predicted = lda_clf.predict(Xc_test)\nprint('\\n The error rate of the LDA model for conditional is {0:.2f}% '.format(100*np.mean(yc_predicted!=yc_test)))\n\nlda_clf.fit(Xd_train,yd_train)\nyd_predicted = lda_clf.predict(Xd_test)\nprint('\\n The error rate of the LDA model for doubt questions is {0:.2f}% '.format(100*np.mean(yd_predicted!=yd_test)))\n\nlda_clf.fit(Xe_train,ye_train)\nye_predicted = lda_clf.predict(Xe_test)\nprint('\\n The error rate of the LDA model for emphasis is {0:.2f}% '.format(100*np.mean(ye_predicted!=ye_test)))\n\nlda_clf.fit(Xn_train,yn_train)\nyn_predicted = lda_clf.predict(Xn_test)\nprint('\\n The error rate of the LDA model for negative is {0:.2f}% '.format(100*np.mean(yn_predicted!=yn_test)))\n\nlda_clf.fit(Xr_train,yr_train)\nyr_predicted = lda_clf.predict(Xr_test)\nprint('\\n The error rate of the LDA model for relativr is {0:.2f}% '.format(100*np.mean(yr_predicted!=yr_test)))\n\nlda_clf.fit(Xt_train,yt_train)\nyt_predicted = lda_clf.predict(Xt_test)\nprint('\\n The error rate of the LDA model for topics is {0:.2f}% '.format(100*np.mean(yt_predicted!=yt_test)))\n\nlda_clf.fit(Xw_train,yw_train)\nyw_predicted = lda_clf.predict(Xw_test)\nprint('\\n The error rate of the LDA model for wh questions is {0:.2f}% '.format(100*np.mean(yw_predicted!=yw_test)))\n\nlda_clf.fit(Xy_train,yy_train)\nyy_predicted = lda_clf.predict(Xy_test)\nprint('\\n The error rate of the LDA model for yes or no is {0:.2f}% '.format(100*np.mean(yy_predicted!=yy_test)))\n\n",
"step-ids": [
19,
27,
29,
30,
40
]
}
|
[
19,
27,
29,
30,
40
] |
import sys, os
sys.path.append(os.pardir) # so that modules in the parent directory can be imported
import numpy as np
from dataset.mnist import load_mnist
from controller import Controller
# Load the MNIST data
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)
# instance
controller = Controller()
# accuracy
trycount = 1000
accuracy_cnt = 0
result = np.zeros((10, 10))
for i in range(len(x_test)):
p = controller.accuracy(x_test[i])
a = np.argmax(t_test[i])
#print("p = " + str(p))
#print("a = " + str(a))
result[p][a] += 1
#print(t_test[i])
if p == a:
accuracy_cnt += 1
    # stop once `trycount` samples have been scored (i is zero-based),
    # so the denominator below matches the number of samples evaluated
    if i == trycount - 1:
        break
print("Accuracy:" + str(float(accuracy_cnt) / trycount))
print(result)
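# Follow-up sketch (added, not part of the original file): `result` is a 10x10
# matrix indexed as result[predicted][actual], so per-class recall is the
# diagonal divided by each column (true-label) total.
column_totals = result.sum(axis=0)
per_class_recall = np.diag(result) / np.maximum(column_totals, 1)  # guard against unseen classes
print(per_class_recall)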
|
normal
|
{
"blob_id": "c2d8e34ab0b449a971c920fc86f259f093f16cc5",
"index": 7156,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append(os.pardir)\n<mask token>\nfor i in range(len(x_test)):\n p = controller.accuracy(x_test[i])\n a = np.argmax(t_test[i])\n result[p][a] += 1\n if p == a:\n accuracy_cnt += 1\n if i == trycount:\n break\nprint('Accuracy:' + str(float(accuracy_cnt) / trycount))\nprint(result)\n",
"step-3": "<mask token>\nsys.path.append(os.pardir)\n<mask token>\n(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True,\n one_hot_label=True)\ncontroller = Controller()\ntrycount = 1000\naccuracy_cnt = 0\nresult = np.zeros((10, 10))\nfor i in range(len(x_test)):\n p = controller.accuracy(x_test[i])\n a = np.argmax(t_test[i])\n result[p][a] += 1\n if p == a:\n accuracy_cnt += 1\n if i == trycount:\n break\nprint('Accuracy:' + str(float(accuracy_cnt) / trycount))\nprint(result)\n",
"step-4": "import sys, os\nsys.path.append(os.pardir)\nimport numpy as np\nfrom dataset.mnist import load_mnist\nfrom controller import Controller\n(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True,\n one_hot_label=True)\ncontroller = Controller()\ntrycount = 1000\naccuracy_cnt = 0\nresult = np.zeros((10, 10))\nfor i in range(len(x_test)):\n p = controller.accuracy(x_test[i])\n a = np.argmax(t_test[i])\n result[p][a] += 1\n if p == a:\n accuracy_cnt += 1\n if i == trycount:\n break\nprint('Accuracy:' + str(float(accuracy_cnt) / trycount))\nprint(result)\n",
"step-5": "import sys, os\nsys.path.append(os.pardir) # 親ディレクトリのファイルをインポートするための設定\nimport numpy as np\nfrom dataset.mnist import load_mnist\nfrom controller import Controller\n\n# データの読み込み\n(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True, one_hot_label=True)\n\n# instance\ncontroller = Controller()\n\n# accuracy\ntrycount = 1000\naccuracy_cnt = 0\nresult = np.zeros((10, 10))\n\nfor i in range(len(x_test)):\n p = controller.accuracy(x_test[i])\n a = np.argmax(t_test[i])\n\n #print(\"p = \" + str(p))\n #print(\"a = \" + str(a))\n result[p][a] += 1\n #print(t_test[i])\n if p == a:\n accuracy_cnt += 1\n\n if (i == trycount):\n break\nprint(\"Accuracy:\" + str(float(accuracy_cnt) / trycount))\nprint(result)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import pymysql
class DB:
def __init__(self, host='localhost', port=3306, db_='test', user='wj',
passwd='', charset='utf8'):
self.db = db_
self.conn = pymysql.connect(host=host, port=port, db=db_, user=user, passwd=passwd, charset=charset)
self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.conn.commit()
self.cur.close()
self.conn.close()
def write(self, data):
sql = "INSERT INTO {}({}) VALUES ('%s')".format('data', 'a') % data
self.cur.execute(sql)
self.conn.commit()
def read(self):
sql = "SELECT * FROM {}".format('data')
self.cur.execute(sql)
results = self.cur.fetchall()
return results[0]['a']
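# Sketch (added, not part of the original file): pymysql cursors also accept
# parameterized queries, so write() could avoid manual quoting with e.g.:
#   self.cur.execute("INSERT INTO {}({}) VALUES (%s)".format('data', 'a'), (data,))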
if __name__ == '__main__':
test = [1, 2, 3, 4, 5, 6, 7]
with DB() as db:
db.write(str(test))
a = eval(db.read())
print(a[2:])
|
normal
|
{
"blob_id": "80ad4459436e2e1cc44509e7dae18d1539bf2bc0",
"index": 8139,
"step-1": "<mask token>\n\n\nclass DB:\n <mask token>\n\n def __enter__(self):\n return self\n <mask token>\n\n def write(self, data):\n sql = \"INSERT INTO {}({}) VALUES ('%s')\".format('data', 'a') % data\n self.cur.execute(sql)\n self.conn.commit()\n\n def read(self):\n sql = 'SELECT * FROM {}'.format('data')\n self.cur.execute(sql)\n results = self.cur.fetchall()\n return results[0]['a']\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass DB:\n\n def __init__(self, host='localhost', port=3306, db_='test', user='wj',\n passwd='', charset='utf8'):\n self.db = db_\n self.conn = pymysql.connect(host=host, port=port, db=db_, user=user,\n passwd=passwd, charset=charset)\n self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.conn.commit()\n self.cur.close()\n self.conn.close()\n\n def write(self, data):\n sql = \"INSERT INTO {}({}) VALUES ('%s')\".format('data', 'a') % data\n self.cur.execute(sql)\n self.conn.commit()\n\n def read(self):\n sql = 'SELECT * FROM {}'.format('data')\n self.cur.execute(sql)\n results = self.cur.fetchall()\n return results[0]['a']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass DB:\n\n def __init__(self, host='localhost', port=3306, db_='test', user='wj',\n passwd='', charset='utf8'):\n self.db = db_\n self.conn = pymysql.connect(host=host, port=port, db=db_, user=user,\n passwd=passwd, charset=charset)\n self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.conn.commit()\n self.cur.close()\n self.conn.close()\n\n def write(self, data):\n sql = \"INSERT INTO {}({}) VALUES ('%s')\".format('data', 'a') % data\n self.cur.execute(sql)\n self.conn.commit()\n\n def read(self):\n sql = 'SELECT * FROM {}'.format('data')\n self.cur.execute(sql)\n results = self.cur.fetchall()\n return results[0]['a']\n\n\nif __name__ == '__main__':\n test = [1, 2, 3, 4, 5, 6, 7]\n with DB() as db:\n db.write(str(test))\n a = eval(db.read())\n print(a[2:])\n",
"step-4": "import pymysql\n\n\nclass DB:\n\n def __init__(self, host='localhost', port=3306, db_='test', user='wj',\n passwd='', charset='utf8'):\n self.db = db_\n self.conn = pymysql.connect(host=host, port=port, db=db_, user=user,\n passwd=passwd, charset=charset)\n self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.conn.commit()\n self.cur.close()\n self.conn.close()\n\n def write(self, data):\n sql = \"INSERT INTO {}({}) VALUES ('%s')\".format('data', 'a') % data\n self.cur.execute(sql)\n self.conn.commit()\n\n def read(self):\n sql = 'SELECT * FROM {}'.format('data')\n self.cur.execute(sql)\n results = self.cur.fetchall()\n return results[0]['a']\n\n\nif __name__ == '__main__':\n test = [1, 2, 3, 4, 5, 6, 7]\n with DB() as db:\n db.write(str(test))\n a = eval(db.read())\n print(a[2:])\n",
"step-5": "import pymysql\r\n\r\n\r\nclass DB:\r\n def __init__(self, host='localhost', port=3306, db_='test', user='wj',\r\n passwd='', charset='utf8'):\r\n self.db = db_\r\n self.conn = pymysql.connect(host=host, port=port, db=db_, user=user, passwd=passwd, charset=charset)\r\n self.cur = self.conn.cursor(cursor=pymysql.cursors.DictCursor)\r\n\r\n def __enter__(self):\r\n return self\r\n\r\n def __exit__(self, exc_type, exc_val, exc_tb):\r\n self.conn.commit()\r\n self.cur.close()\r\n self.conn.close()\r\n\r\n def write(self, data):\r\n sql = \"INSERT INTO {}({}) VALUES ('%s')\".format('data', 'a') % data\r\n self.cur.execute(sql)\r\n self.conn.commit()\r\n\r\n def read(self):\r\n sql = \"SELECT * FROM {}\".format('data')\r\n self.cur.execute(sql)\r\n results = self.cur.fetchall()\r\n return results[0]['a']\r\n\r\n\r\nif __name__ == '__main__':\r\n test = [1, 2, 3, 4, 5, 6, 7]\r\n with DB() as db:\r\n db.write(str(test))\r\n a = eval(db.read())\r\n print(a[2:])\r\n",
"step-ids": [
4,
6,
7,
8,
9
]
}
|
[
4,
6,
7,
8,
9
] |
import sklearn.metrics as metrics
import sklearn.cross_validation as cv
from sklearn.externals import joblib
import MachineLearning.Reinforcement.InternalSQLManager as sqlManager
class ReinforcementLearner:
def __init__(self, clf=None, load=False, clfName=None):
"""
Initialise the Classifier, either from the provided model or from the stored classifier
:param clf: The current classifier, not yet fitted to the data
:param load: Set to True in order to load a previously saved model
"""
if load:
self.clf = joblib.load("model.pkl")
self.reTrain = True
else:
self.clf = clf
self.reTrain = False
if clfName == None:
self.name = self.clf.__class__.__name__
else:
self.name = clfName
def fit(self, X, y, scoring="accuracy", crossval=5):
"""
        Fit the Reinforcement classifier with data, either updating a previously trained model with new data or learning for the first time.
:param X: Input Features
:param y: Class Labels
:param scoring: Scoring used for cross validation
:param crossval: Cross Validation number of folds
:return: True if a new model is fit to the data, or a previous model is updated
                 False if the model re-fit on the new data scores worse than it
                 did on the earlier data
"""
if not self.reTrain: # Train first time
score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)
sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1) # Store the first result of clf
self.clf.fit(X, y)
joblib.dump(self.clf, "model.pkl") # Store the CLF
print("Data Fit")
return True
else:
previousData = sqlManager.selectNewestRecord(self.name) # Check the last entry of CLF
if len(previousData) > 0:
oldSize = previousData[5]
newSize = len(y)
accScore = previousData[3]
score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)
newAccScore = score.mean()
print("Old Accuracy Score : ", accScore)
print("New Accuracy Score : ", newAccScore)
                if accScore <= newAccScore: # keep the new model if accuracy does not decrease
print("Reinforcement Learning : Newer model is superior. Saving Model.")
self.clf.fit(X, y)
sqlManager.insertValue(self.name, accScore, newAccScore, oldSize, newSize, 1)
joblib.dump(self.clf, "model.pkl")
return True
else:
print("Reinforcement Learning : Newer model is inferior. Not saving model.")
return False
def predict(self, X):
return self.clf.predict(X)
def __exit__(self, exc_type, exc_val, exc_tb):
sqlManager.close()
if __name__ == "__main__":
pass
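    # Usage sketch (added, not part of the original file; assumes an older
    # scikit-learn where sklearn.cross_validation and sklearn.externals.joblib
    # still exist, plus some feature matrix X and labels y):
    #   from sklearn.ensemble import RandomForestClassifier
    #   learner = ReinforcementLearner(clf=RandomForestClassifier())
    #   learner.fit(X, y)                # first fit, writes model.pkl
    #   learner = ReinforcementLearner(load=True, clfName='RandomForestClassifier')
    #   learner.fit(X_new, y_new)        # kept only if the CV accuracy does not drop
    #   predictions = learner.predict(X_new)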
|
normal
|
{
"blob_id": "c9be3d25824093528e2bee51c045d05e036daa67",
"index": 9715,
"step-1": "<mask token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n <mask token>\n <mask token>\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n <mask token>\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring='accuracy', crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain:\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1)\n self.clf.fit(X, y)\n joblib.dump(self.clf, 'model.pkl')\n print('Data Fit')\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name)\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n accScore = previousData[3]\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval\n )\n newAccScore = score.mean()\n print('Old Accuracy Score : ', accScore)\n print('New Accuracy Score : ', newAccScore)\n if accScore <= newAccScore:\n print(\n 'Reinforcement Learning : Newer model is superior. Saving Model.'\n )\n self.clf.fit(X, y)\n sqlManager.insertValue(self.name, accScore, newAccScore,\n oldSize, newSize, 1)\n joblib.dump(self.clf, 'model.pkl')\n return True\n else:\n print(\n 'Reinforcement Learning : Newer model is inferior. Not saving model.'\n )\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n if load:\n self.clf = joblib.load('model.pkl')\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring='accuracy', crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain:\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1)\n self.clf.fit(X, y)\n joblib.dump(self.clf, 'model.pkl')\n print('Data Fit')\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name)\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n accScore = previousData[3]\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval\n )\n newAccScore = score.mean()\n print('Old Accuracy Score : ', accScore)\n print('New Accuracy Score : ', newAccScore)\n if accScore <= newAccScore:\n print(\n 'Reinforcement Learning : Newer model is superior. Saving Model.'\n )\n self.clf.fit(X, y)\n sqlManager.insertValue(self.name, accScore, newAccScore,\n oldSize, newSize, 1)\n joblib.dump(self.clf, 'model.pkl')\n return True\n else:\n print(\n 'Reinforcement Learning : Newer model is inferior. Not saving model.'\n )\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\n\nif __name__ == '__main__':\n pass\n",
"step-5": "import sklearn.metrics as metrics\nimport sklearn.cross_validation as cv\nfrom sklearn.externals import joblib\nimport MachineLearning.Reinforcement.InternalSQLManager as sqlManager\n\nclass ReinforcementLearner:\n\n def __init__(self, clf=None, load=False, clfName=None):\n \"\"\"\n Initialise the Classifier, either from the provided model or from the stored classifier\n\n :param clf: The current classifier, not yet fitted to the data\n :param load: Set to True in order to load a previously saved model\n \"\"\"\n\n if load:\n self.clf = joblib.load(\"model.pkl\")\n self.reTrain = True\n else:\n self.clf = clf\n self.reTrain = False\n\n if clfName == None:\n self.name = self.clf.__class__.__name__\n else:\n self.name = clfName\n\n def fit(self, X, y, scoring=\"accuracy\", crossval=5):\n \"\"\"\n Fit the Reinforcement classifier with data, either adding to previous previous data or learning for first time.\n\n :param X: Input Features\n :param y: Class Labels\n :param scoring: Scoring used for cross validation\n :param crossval: Cross Validation number of folds\n :return: True if a new model is fit to the data, or a previous model is updated\n False if old model when fit to new data performs poorly in comparison to\n earlier data\n \"\"\"\n if not self.reTrain: # Train first time\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n\n sqlManager.insertValue(self.name, 0.0, score.mean(), 0, len(y), 1) # Store the first result of clf\n self.clf.fit(X, y)\n\n joblib.dump(self.clf, \"model.pkl\") # Store the CLF\n print(\"Data Fit\")\n return True\n else:\n previousData = sqlManager.selectNewestRecord(self.name) # Check the last entry of CLF\n if len(previousData) > 0:\n oldSize = previousData[5]\n newSize = len(y)\n\n accScore = previousData[3]\n\n score = cv.cross_val_score(self.clf, X, y, scoring, cv=crossval)\n newAccScore = score.mean()\n print(\"Old Accuracy Score : \", accScore)\n print(\"New Accuracy Score : \", newAccScore)\n\n if accScore <= newAccScore: # If new data is benefitial, increases accuracy\n print(\"Reinforcement Learning : Newer model is superior. Saving Model.\")\n self.clf.fit(X, y)\n\n sqlManager.insertValue(self.name, accScore, newAccScore, oldSize, newSize, 1)\n joblib.dump(self.clf, \"model.pkl\")\n return True\n else:\n print(\"Reinforcement Learning : Newer model is inferior. Not saving model.\")\n return False\n\n def predict(self, X):\n return self.clf.predict(X)\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n sqlManager.close()\n\nif __name__ == \"__main__\":\n pass\n\n",
"step-ids": [
3,
4,
5,
6,
8
]
}
|
[
3,
4,
5,
6,
8
] |
#!/usr/bin/env python
"""
##############################################################################
Software Package Risk Analysis Development Environment Specific Work Book View
##############################################################################
"""
# -*- coding: utf-8 -*-
#
# rtk.software.__gui.gtk.DevelopmentEnvironment.py is part of The RTK
# Project
#
# All rights reserved.
import sys
# Import modules for localization support.
import gettext
import locale
# Modules required for the GUI.
try:
import pygtk
pygtk.require('2.0')
except ImportError:
sys.exit(1)
try:
import gtk
except ImportError:
sys.exit(1)
try:
import gtk.glade
except ImportError:
sys.exit(1)
# Import other RTK modules.
try:
import Configuration
import gui.gtk.Widgets as Widgets
except ImportError:
import rtk.Configuration as Configuration
import rtk.gui.gtk.Widgets as Widgets
__author__ = 'Andrew Rowland'
__email__ = '[email protected]'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2007 - 2015 Andrew "weibullguy" Rowland'
try:
locale.setlocale(locale.LC_ALL, Configuration.LOCALE)
except locale.Error:
locale.setlocale(locale.LC_ALL, '')
_ = gettext.gettext
class RiskAnalysis(gtk.VPaned):
"""
The Work Book view for analyzing and displaying the risk associated with
the development environment. The attributes of a development environment
Work Book view are:
:ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.
:ivar _software_model: the :py:class:`rtk.software.Software.Model` to
display.
"""
def __init__(self):
"""
Method to initialize the development environment risk analysis
questions Work Book page.
"""
gtk.VPaned.__init__(self)
# Define private dictionary attributes.
# Define private list attributes.
self._lst_handler_id = []
# Define private scalar attributes.
self._software_model = None
# Define public dictionary attributes.
# Define public list attributes.
# Define public scalar attributes.
self.chkDevEnvQ1 = Widgets.make_check_button()
self.chkDevEnvQ2 = Widgets.make_check_button()
self.chkDevEnvQ3 = Widgets.make_check_button()
self.chkDevEnvQ4 = Widgets.make_check_button()
self.chkDevEnvQ5 = Widgets.make_check_button()
self.chkDevEnvQ6 = Widgets.make_check_button()
self.chkDevEnvQ7 = Widgets.make_check_button()
self.chkDevEnvQ8 = Widgets.make_check_button()
self.chkDevEnvQ9 = Widgets.make_check_button()
self.chkDevEnvQ10 = Widgets.make_check_button()
self.chkDevEnvQ11 = Widgets.make_check_button()
self.chkDevEnvQ12 = Widgets.make_check_button()
self.chkDevEnvQ13 = Widgets.make_check_button()
self.chkDevEnvQ14 = Widgets.make_check_button()
self.chkDevEnvQ15 = Widgets.make_check_button()
self.chkDevEnvQ16 = Widgets.make_check_button()
self.chkDevEnvQ17 = Widgets.make_check_button()
self.chkDevEnvQ18 = Widgets.make_check_button()
self.chkDevEnvQ19 = Widgets.make_check_button()
self.chkDevEnvQ20 = Widgets.make_check_button()
self.chkDevEnvQ21 = Widgets.make_check_button()
self.chkDevEnvQ22 = Widgets.make_check_button()
self.chkDevEnvQ23 = Widgets.make_check_button()
self.chkDevEnvQ24 = Widgets.make_check_button()
self.chkDevEnvQ25 = Widgets.make_check_button()
self.chkDevEnvQ26 = Widgets.make_check_button()
self.chkDevEnvQ27 = Widgets.make_check_button()
self.chkDevEnvQ28 = Widgets.make_check_button()
self.chkDevEnvQ29 = Widgets.make_check_button()
self.chkDevEnvQ30 = Widgets.make_check_button()
self.chkDevEnvQ31 = Widgets.make_check_button()
self.chkDevEnvQ32 = Widgets.make_check_button()
self.chkDevEnvQ33 = Widgets.make_check_button()
self.chkDevEnvQ34 = Widgets.make_check_button()
self.chkDevEnvQ35 = Widgets.make_check_button()
self.chkDevEnvQ36 = Widgets.make_check_button()
self.chkDevEnvQ37 = Widgets.make_check_button()
self.chkDevEnvQ38 = Widgets.make_check_button()
self.chkDevEnvQ39 = Widgets.make_check_button()
self.chkDevEnvQ40 = Widgets.make_check_button()
self.chkDevEnvQ41 = Widgets.make_check_button()
self.chkDevEnvQ42 = Widgets.make_check_button()
self.chkDevEnvQ43 = Widgets.make_check_button()
# Connect gtk.Widget() signals to callback methods.
self._lst_handler_id.append(
self.chkDevEnvQ1.connect('toggled', self._on_toggled, 0))
self._lst_handler_id.append(
self.chkDevEnvQ2.connect('toggled', self._on_toggled, 1))
self._lst_handler_id.append(
self.chkDevEnvQ3.connect('toggled', self._on_toggled, 2))
self._lst_handler_id.append(
self.chkDevEnvQ4.connect('toggled', self._on_toggled, 3))
self._lst_handler_id.append(
self.chkDevEnvQ5.connect('toggled', self._on_toggled, 4))
self._lst_handler_id.append(
self.chkDevEnvQ6.connect('toggled', self._on_toggled, 5))
self._lst_handler_id.append(
self.chkDevEnvQ7.connect('toggled', self._on_toggled, 6))
self._lst_handler_id.append(
self.chkDevEnvQ8.connect('toggled', self._on_toggled, 7))
self._lst_handler_id.append(
self.chkDevEnvQ9.connect('toggled', self._on_toggled, 8))
self._lst_handler_id.append(
self.chkDevEnvQ10.connect('toggled', self._on_toggled, 9))
self._lst_handler_id.append(
self.chkDevEnvQ11.connect('toggled', self._on_toggled, 10))
self._lst_handler_id.append(
self.chkDevEnvQ12.connect('toggled', self._on_toggled, 11))
self._lst_handler_id.append(
self.chkDevEnvQ13.connect('toggled', self._on_toggled, 12))
self._lst_handler_id.append(
self.chkDevEnvQ14.connect('toggled', self._on_toggled, 13))
self._lst_handler_id.append(
self.chkDevEnvQ15.connect('toggled', self._on_toggled, 14))
self._lst_handler_id.append(
self.chkDevEnvQ16.connect('toggled', self._on_toggled, 15))
self._lst_handler_id.append(
self.chkDevEnvQ17.connect('toggled', self._on_toggled, 16))
self._lst_handler_id.append(
self.chkDevEnvQ18.connect('toggled', self._on_toggled, 17))
self._lst_handler_id.append(
self.chkDevEnvQ19.connect('toggled', self._on_toggled, 18))
self._lst_handler_id.append(
self.chkDevEnvQ20.connect('toggled', self._on_toggled, 19))
self._lst_handler_id.append(
self.chkDevEnvQ21.connect('toggled', self._on_toggled, 20))
self._lst_handler_id.append(
self.chkDevEnvQ22.connect('toggled', self._on_toggled, 21))
self._lst_handler_id.append(
self.chkDevEnvQ23.connect('toggled', self._on_toggled, 22))
self._lst_handler_id.append(
self.chkDevEnvQ24.connect('toggled', self._on_toggled, 23))
self._lst_handler_id.append(
self.chkDevEnvQ25.connect('toggled', self._on_toggled, 24))
self._lst_handler_id.append(
self.chkDevEnvQ26.connect('toggled', self._on_toggled, 25))
self._lst_handler_id.append(
self.chkDevEnvQ27.connect('toggled', self._on_toggled, 26))
self._lst_handler_id.append(
self.chkDevEnvQ28.connect('toggled', self._on_toggled, 27))
self._lst_handler_id.append(
self.chkDevEnvQ29.connect('toggled', self._on_toggled, 28))
self._lst_handler_id.append(
self.chkDevEnvQ30.connect('toggled', self._on_toggled, 29))
self._lst_handler_id.append(
self.chkDevEnvQ31.connect('toggled', self._on_toggled, 30))
self._lst_handler_id.append(
self.chkDevEnvQ32.connect('toggled', self._on_toggled, 31))
self._lst_handler_id.append(
self.chkDevEnvQ33.connect('toggled', self._on_toggled, 32))
self._lst_handler_id.append(
self.chkDevEnvQ34.connect('toggled', self._on_toggled, 33))
self._lst_handler_id.append(
self.chkDevEnvQ35.connect('toggled', self._on_toggled, 34))
self._lst_handler_id.append(
self.chkDevEnvQ36.connect('toggled', self._on_toggled, 35))
self._lst_handler_id.append(
self.chkDevEnvQ37.connect('toggled', self._on_toggled, 36))
self._lst_handler_id.append(
self.chkDevEnvQ38.connect('toggled', self._on_toggled, 37))
self._lst_handler_id.append(
self.chkDevEnvQ39.connect('toggled', self._on_toggled, 38))
self._lst_handler_id.append(
self.chkDevEnvQ40.connect('toggled', self._on_toggled, 39))
self._lst_handler_id.append(
self.chkDevEnvQ41.connect('toggled', self._on_toggled, 40))
self._lst_handler_id.append(
self.chkDevEnvQ42.connect('toggled', self._on_toggled, 41))
self._lst_handler_id.append(
self.chkDevEnvQ43.connect('toggled', self._on_toggled, 42))
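        # The 43 'toggled' connections above follow one pattern; a loop could
        # build the same handler list (sketch, added, not part of the original file):
        #   for _index in range(43):
        #       _check = getattr(self, 'chkDevEnvQ{0:d}'.format(_index + 1))
        #       self._lst_handler_id.append(
        #           _check.connect('toggled', self._on_toggled, _index))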
def create_risk_analysis_page(self, notebook):
"""
Method to create the development environment risk analysis page and add
it to the risk analysis gtk.Notebook().
:param gtk.Notebook notebook: the gtk.Notebook() instance that will
hold the development environment risk
analysis questions.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
# Build-up the containers for the tab. #
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #
_hpaned = gtk.HPaned()
self.pack1(_hpaned, resize=True, shrink=True)
# Create the organizational risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Organization"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(u"1. There are separate design and coding "
u"organizations."),
_(u"2. There is an independent software test "
u"organization."),
_(u"3. There is an independent software quality "
u"assurance organization."),
_(u"4. There is an independent software configuration "
u"management organization."),
_(u"5. There is an independent software verification "
u"and validation organization."),
_(u"6. A structured programming team will develop the "
u"software."),
_(u"7. The educational level of the software team members "
u"is above average."),
_(u"8. The experience level of the software team members "
u"is above average.")]
(_x_pos,
_y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_x_pos += 125
_fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])
# Create the methods risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Methods"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u"1. Standards are defined and will be enforced."),
_(u"2. Software will be developed using a higher order "
u"language."),
_(u"3. The development process will include formal "
u"reviews (PDR, CDR, etc.)."),
_(u"4. The development process will include frequent "
u"walkthroughs."),
_(u"5. Development will take a top-down and "
u"structured approach."),
_(u"6. Unit development folders will be used."),
_(u"7. A software development library will be used."),
_(u"8. A formal change and error reporting process "
u"will be used."),
_(u"9. Progress and status will routinely be "
u"reported.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])
# Create the documentation risk pane.
_hpaned = gtk.HPaned()
self.pack2(_hpaned, resize=True, shrink=True)
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Documentation"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack1(_frame, True, True)
_labels = [_(u" 1. System requirements specifications will be "
u"documented."),
_(u" 2. Software requirements specifications will be "
u"documented."),
_(u" 3. Interface design specifications will be "
u"documented."),
_(u" 4. Software design specification will be "
u"documented."),
_(u" 5. Test plans, procedures, and reports will be "
u"documented."),
_(u" 6. The software development plan will be "
u"documented."),
_(u" 7. The software quality assurance plan will be "
u"documented."),
_(u" 8. The software configuration management plan will "
u"be documented."),
_(u" 9. A requirements traceability matrix will be "
u"used."),
_(u"10. The software version description will be "
u"documented."),
_(u"11. All software discrepancies will be "
u"documented.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])
# Create the tools and test techniques risk pane.
_fixed = gtk.Fixed()
_scrollwindow = gtk.ScrolledWindow()
_scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
_scrollwindow.add_with_viewport(_fixed)
_frame = Widgets.make_frame(label=_(u"Tools & Test Techniques"))
_frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)
_frame.add(_scrollwindow)
_hpaned.pack2(_frame, True, True)
_labels = [_(u" 1. The software language requirements will be "
u"specified."),
_(u" 2. Formal program design language will be used."),
_(u" 3. Program design graphical techniques "
u"(flowcharts, HIPO, etc.) will be used."),
_(u" 4. Simulation/emulation tools will be used."),
_(u" 5. Configuration management tools will be used."),
_(u" 6. A code auditing tool will be used."),
_(u" 7. A data flow analyzer will be used."),
_(u" 8. A programmer's workbench will be used."),
_(u" 9. Measurement tools will be used."),
_(u"10. Software code reviews will be used."),
_(u"11. Software branch testing will be used."),
_(u"12. Random testing will be used."),
_(u"13. Functional testing will be used."),
_(u"14. Error and anomaly detection testing will be "
u"used."),
_(u"15. Structure analysis will be used.")]
(__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)
_fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])
_fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])
_fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])
_fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])
_fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])
_fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])
_fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])
_fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])
_fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])
_fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])
_fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])
_fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])
_fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])
_fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])
_fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])
_label = gtk.Label()
_label.set_markup("<span weight='bold'>" +
_(u"Development\nEnvironment") +
"</span>")
_label.set_alignment(xalign=0.5, yalign=0.5)
_label.set_justify(gtk.JUSTIFY_CENTER)
_label.set_angle(0)
_label.show_all()
_label.set_tooltip_text(_(u"Assesses risk due to the development "
u"environment."))
notebook.insert_page(self, tab_label=_label, position=-1)
return False
def load(self, model):
"""
Method to load the Development Environment Risk Analysis answers.
:param `rtk.software.Software` model: the Software data model to load
the gtk.ToggleButton() from.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
self._software_model = model
self.chkDevEnvQ1.set_active(model.lst_development[0])
self.chkDevEnvQ2.set_active(model.lst_development[1])
self.chkDevEnvQ3.set_active(model.lst_development[2])
self.chkDevEnvQ4.set_active(model.lst_development[3])
self.chkDevEnvQ5.set_active(model.lst_development[4])
self.chkDevEnvQ6.set_active(model.lst_development[5])
self.chkDevEnvQ7.set_active(model.lst_development[6])
self.chkDevEnvQ8.set_active(model.lst_development[7])
self.chkDevEnvQ9.set_active(model.lst_development[8])
self.chkDevEnvQ10.set_active(model.lst_development[9])
self.chkDevEnvQ11.set_active(model.lst_development[10])
self.chkDevEnvQ12.set_active(model.lst_development[11])
self.chkDevEnvQ13.set_active(model.lst_development[12])
self.chkDevEnvQ14.set_active(model.lst_development[13])
self.chkDevEnvQ15.set_active(model.lst_development[14])
self.chkDevEnvQ16.set_active(model.lst_development[15])
self.chkDevEnvQ17.set_active(model.lst_development[16])
self.chkDevEnvQ18.set_active(model.lst_development[17])
self.chkDevEnvQ19.set_active(model.lst_development[18])
self.chkDevEnvQ20.set_active(model.lst_development[19])
self.chkDevEnvQ21.set_active(model.lst_development[20])
self.chkDevEnvQ22.set_active(model.lst_development[21])
self.chkDevEnvQ23.set_active(model.lst_development[22])
self.chkDevEnvQ24.set_active(model.lst_development[23])
self.chkDevEnvQ25.set_active(model.lst_development[24])
self.chkDevEnvQ26.set_active(model.lst_development[25])
self.chkDevEnvQ27.set_active(model.lst_development[26])
self.chkDevEnvQ28.set_active(model.lst_development[27])
self.chkDevEnvQ29.set_active(model.lst_development[28])
self.chkDevEnvQ30.set_active(model.lst_development[29])
self.chkDevEnvQ31.set_active(model.lst_development[30])
self.chkDevEnvQ32.set_active(model.lst_development[31])
self.chkDevEnvQ33.set_active(model.lst_development[32])
self.chkDevEnvQ34.set_active(model.lst_development[33])
self.chkDevEnvQ35.set_active(model.lst_development[34])
self.chkDevEnvQ36.set_active(model.lst_development[35])
self.chkDevEnvQ37.set_active(model.lst_development[36])
self.chkDevEnvQ38.set_active(model.lst_development[37])
self.chkDevEnvQ39.set_active(model.lst_development[38])
self.chkDevEnvQ40.set_active(model.lst_development[39])
self.chkDevEnvQ41.set_active(model.lst_development[40])
self.chkDevEnvQ42.set_active(model.lst_development[41])
self.chkDevEnvQ43.set_active(model.lst_development[42])
return False
def _on_toggled(self, check, index):
"""
Callback method for gtk.CheckButton() 'toggled' event.
:param gtk.CheckButton check: the gtk.CheckButton() that called this
method.
:param int index: the index of the Development Environment question
associated with the gtk.CheckButton() that was
toggled.
:return: False if successful or True if an error is encountered.
:rtype: bool
"""
check.handler_block(self._lst_handler_id[index])
self._software_model.lst_development[index] = int(check.get_active())
check.handler_unblock(self._lst_handler_id[index])
return False
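# Usage sketch (added, not part of the original file; assumes an existing
# gtk.Notebook() and a Software data model whose lst_development holds the
# 43 development-environment answers):
#   _page = RiskAnalysis()
#   _page.create_risk_analysis_page(_notebook)
#   _page.load(_software_model)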
|
normal
|
{
"blob_id": "327371d373819273a2f77f63e0cedee6950dbc46",
"index": 976,
"step-1": "<mask token>\n\n\nclass RiskAnalysis(gtk.VPaned):\n <mask token>\n <mask token>\n\n def create_risk_analysis_page(self, notebook):\n \"\"\"\n Method to create the development environment risk analysis page and add\n it to the risk analysis gtk.Notebook().\n\n :param gtk.Notebook notebook: the gtk.Notebook() instance that will\n hold the development environment risk\n analysis questions.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n _hpaned = gtk.HPaned()\n self.pack1(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Organization'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u'1. There are separate design and coding organizations.'), _(\n u'2. There is an independent software test organization.'), _(\n u'3. There is an independent software quality assurance organization.'\n ), _(\n u'4. There is an independent software configuration management organization.'\n ), _(\n u'5. There is an independent software verification and validation organization.'\n ), _(\n u'6. A structured programming team will develop the software.'),\n _(\n u'7. The educational level of the software team members is above average.'\n ), _(\n u'8. The experience level of the software team members is above average.'\n )]\n _x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _x_pos += 125\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Methods'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(u'1. Standards are defined and will be enforced.'), _(\n u'2. Software will be developed using a higher order language.'\n ), _(\n u'3. The development process will include formal reviews (PDR, CDR, etc.).'\n ), _(\n u'4. The development process will include frequent walkthroughs.'\n ), _(\n u'5. Development will take a top-down and structured approach.'\n ), _(u'6. Unit development folders will be used.'), _(\n u'7. A software development library will be used.'), _(\n u'8. A formal change and error reporting process will be used.'\n ), _(u'9. 
Progress and status will routinely be reported.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\n _hpaned = gtk.HPaned()\n self.pack2(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Documentation'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u' 1. System requirements specifications will be documented.'),\n _(\n u' 2. Software requirements specifications will be documented.'\n ), _(u' 3. Interface design specifications will be documented.'\n ), _(u' 4. Software design specification will be documented.'),\n _(\n u' 5. Test plans, procedures, and reports will be documented.'),\n _(u' 6. The software development plan will be documented.'), _(\n u' 7. The software quality assurance plan will be documented.'),\n _(\n u' 8. The software configuration management plan will be documented.'\n ), _(u' 9. A requirements traceability matrix will be used.'),\n _(u'10. The software version description will be documented.'),\n _(u'11. All software discrepancies will be documented.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(\n u' 1. The software language requirements will be specified.'),\n _(u' 2. Formal program design language will be used.'), _(\n u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'\n ), _(u' 4. Simulation/emulation tools will be used.'), _(\n u' 5. Configuration management tools will be used.'), _(\n u' 6. A code auditing tool will be used.'), _(\n u' 7. A data flow analyzer will be used.'), _(\n u\" 8. A programmer's workbench will be used.\"), _(\n u' 9. Measurement tools will be used.'), _(\n u'10. Software code reviews will be used.'), _(\n u'11. Software branch testing will be used.'), _(\n u'12. Random testing will be used.'), _(\n u'13. Functional testing will be used.'), _(\n u'14. Error and anomaly detection testing will be used.'), _(\n u'15. 
Structure analysis will be used.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\n _label = gtk.Label()\n _label.set_markup(\"<span weight='bold'>\" + _(\n u'Development\\nEnvironment') + '</span>')\n _label.set_alignment(xalign=0.5, yalign=0.5)\n _label.set_justify(gtk.JUSTIFY_CENTER)\n _label.set_angle(0)\n _label.show_all()\n _label.set_tooltip_text(_(\n u'Assesses risk due to the development environment.'))\n notebook.insert_page(self, tab_label=_label, position=-1)\n return False\n\n def load(self, model):\n \"\"\"\n Method to load the Development Environment Risk Analysis answers.\n\n :param `rtk.software.Software` model: the Software data model to load\n the gtk.ToggleButton() from.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n self._software_model = model\n self.chkDevEnvQ1.set_active(model.lst_development[0])\n self.chkDevEnvQ2.set_active(model.lst_development[1])\n self.chkDevEnvQ3.set_active(model.lst_development[2])\n self.chkDevEnvQ4.set_active(model.lst_development[3])\n self.chkDevEnvQ5.set_active(model.lst_development[4])\n self.chkDevEnvQ6.set_active(model.lst_development[5])\n self.chkDevEnvQ7.set_active(model.lst_development[6])\n self.chkDevEnvQ8.set_active(model.lst_development[7])\n self.chkDevEnvQ9.set_active(model.lst_development[8])\n self.chkDevEnvQ10.set_active(model.lst_development[9])\n self.chkDevEnvQ11.set_active(model.lst_development[10])\n self.chkDevEnvQ12.set_active(model.lst_development[11])\n self.chkDevEnvQ13.set_active(model.lst_development[12])\n self.chkDevEnvQ14.set_active(model.lst_development[13])\n self.chkDevEnvQ15.set_active(model.lst_development[14])\n self.chkDevEnvQ16.set_active(model.lst_development[15])\n self.chkDevEnvQ17.set_active(model.lst_development[16])\n self.chkDevEnvQ18.set_active(model.lst_development[17])\n self.chkDevEnvQ19.set_active(model.lst_development[18])\n self.chkDevEnvQ20.set_active(model.lst_development[19])\n self.chkDevEnvQ21.set_active(model.lst_development[20])\n self.chkDevEnvQ22.set_active(model.lst_development[21])\n self.chkDevEnvQ23.set_active(model.lst_development[22])\n self.chkDevEnvQ24.set_active(model.lst_development[23])\n self.chkDevEnvQ25.set_active(model.lst_development[24])\n self.chkDevEnvQ26.set_active(model.lst_development[25])\n self.chkDevEnvQ27.set_active(model.lst_development[26])\n self.chkDevEnvQ28.set_active(model.lst_development[27])\n self.chkDevEnvQ29.set_active(model.lst_development[28])\n self.chkDevEnvQ30.set_active(model.lst_development[29])\n self.chkDevEnvQ31.set_active(model.lst_development[30])\n self.chkDevEnvQ32.set_active(model.lst_development[31])\n self.chkDevEnvQ33.set_active(model.lst_development[32])\n 
self.chkDevEnvQ34.set_active(model.lst_development[33])\n self.chkDevEnvQ35.set_active(model.lst_development[34])\n self.chkDevEnvQ36.set_active(model.lst_development[35])\n self.chkDevEnvQ37.set_active(model.lst_development[36])\n self.chkDevEnvQ38.set_active(model.lst_development[37])\n self.chkDevEnvQ39.set_active(model.lst_development[38])\n self.chkDevEnvQ40.set_active(model.lst_development[39])\n self.chkDevEnvQ41.set_active(model.lst_development[40])\n self.chkDevEnvQ42.set_active(model.lst_development[41])\n self.chkDevEnvQ43.set_active(model.lst_development[42])\n return False\n\n def _on_toggled(self, check, index):\n \"\"\"\n Callback method for gtk.CheckButton() 'toggled' event.\n\n :param gtk.CheckButton check: the gtk.CheckButton() that called this\n method.\n :param int index: the index of the Development Environment question\n associated with the gtk.CheckButton() that was\n toggled.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n check.handler_block(self._lst_handler_id[index])\n self._software_model.lst_development[index] = int(check.get_active())\n check.handler_unblock(self._lst_handler_id[index])\n return False\n",
"step-2": "<mask token>\n\n\nclass RiskAnalysis(gtk.VPaned):\n <mask token>\n\n def __init__(self):\n \"\"\"\n Method to initialize the development environment risk analysis\n questions Work Book page.\n \"\"\"\n gtk.VPaned.__init__(self)\n self._lst_handler_id = []\n self._software_model = None\n self.chkDevEnvQ1 = Widgets.make_check_button()\n self.chkDevEnvQ2 = Widgets.make_check_button()\n self.chkDevEnvQ3 = Widgets.make_check_button()\n self.chkDevEnvQ4 = Widgets.make_check_button()\n self.chkDevEnvQ5 = Widgets.make_check_button()\n self.chkDevEnvQ6 = Widgets.make_check_button()\n self.chkDevEnvQ7 = Widgets.make_check_button()\n self.chkDevEnvQ8 = Widgets.make_check_button()\n self.chkDevEnvQ9 = Widgets.make_check_button()\n self.chkDevEnvQ10 = Widgets.make_check_button()\n self.chkDevEnvQ11 = Widgets.make_check_button()\n self.chkDevEnvQ12 = Widgets.make_check_button()\n self.chkDevEnvQ13 = Widgets.make_check_button()\n self.chkDevEnvQ14 = Widgets.make_check_button()\n self.chkDevEnvQ15 = Widgets.make_check_button()\n self.chkDevEnvQ16 = Widgets.make_check_button()\n self.chkDevEnvQ17 = Widgets.make_check_button()\n self.chkDevEnvQ18 = Widgets.make_check_button()\n self.chkDevEnvQ19 = Widgets.make_check_button()\n self.chkDevEnvQ20 = Widgets.make_check_button()\n self.chkDevEnvQ21 = Widgets.make_check_button()\n self.chkDevEnvQ22 = Widgets.make_check_button()\n self.chkDevEnvQ23 = Widgets.make_check_button()\n self.chkDevEnvQ24 = Widgets.make_check_button()\n self.chkDevEnvQ25 = Widgets.make_check_button()\n self.chkDevEnvQ26 = Widgets.make_check_button()\n self.chkDevEnvQ27 = Widgets.make_check_button()\n self.chkDevEnvQ28 = Widgets.make_check_button()\n self.chkDevEnvQ29 = Widgets.make_check_button()\n self.chkDevEnvQ30 = Widgets.make_check_button()\n self.chkDevEnvQ31 = Widgets.make_check_button()\n self.chkDevEnvQ32 = Widgets.make_check_button()\n self.chkDevEnvQ33 = Widgets.make_check_button()\n self.chkDevEnvQ34 = Widgets.make_check_button()\n self.chkDevEnvQ35 = Widgets.make_check_button()\n self.chkDevEnvQ36 = Widgets.make_check_button()\n self.chkDevEnvQ37 = Widgets.make_check_button()\n self.chkDevEnvQ38 = Widgets.make_check_button()\n self.chkDevEnvQ39 = Widgets.make_check_button()\n self.chkDevEnvQ40 = Widgets.make_check_button()\n self.chkDevEnvQ41 = Widgets.make_check_button()\n self.chkDevEnvQ42 = Widgets.make_check_button()\n self.chkDevEnvQ43 = Widgets.make_check_button()\n self._lst_handler_id.append(self.chkDevEnvQ1.connect('toggled',\n self._on_toggled, 0))\n self._lst_handler_id.append(self.chkDevEnvQ2.connect('toggled',\n self._on_toggled, 1))\n self._lst_handler_id.append(self.chkDevEnvQ3.connect('toggled',\n self._on_toggled, 2))\n self._lst_handler_id.append(self.chkDevEnvQ4.connect('toggled',\n self._on_toggled, 3))\n self._lst_handler_id.append(self.chkDevEnvQ5.connect('toggled',\n self._on_toggled, 4))\n self._lst_handler_id.append(self.chkDevEnvQ6.connect('toggled',\n self._on_toggled, 5))\n self._lst_handler_id.append(self.chkDevEnvQ7.connect('toggled',\n self._on_toggled, 6))\n self._lst_handler_id.append(self.chkDevEnvQ8.connect('toggled',\n self._on_toggled, 7))\n self._lst_handler_id.append(self.chkDevEnvQ9.connect('toggled',\n self._on_toggled, 8))\n self._lst_handler_id.append(self.chkDevEnvQ10.connect('toggled',\n self._on_toggled, 9))\n self._lst_handler_id.append(self.chkDevEnvQ11.connect('toggled',\n self._on_toggled, 10))\n self._lst_handler_id.append(self.chkDevEnvQ12.connect('toggled',\n self._on_toggled, 11))\n 
self._lst_handler_id.append(self.chkDevEnvQ13.connect('toggled',\n self._on_toggled, 12))\n self._lst_handler_id.append(self.chkDevEnvQ14.connect('toggled',\n self._on_toggled, 13))\n self._lst_handler_id.append(self.chkDevEnvQ15.connect('toggled',\n self._on_toggled, 14))\n self._lst_handler_id.append(self.chkDevEnvQ16.connect('toggled',\n self._on_toggled, 15))\n self._lst_handler_id.append(self.chkDevEnvQ17.connect('toggled',\n self._on_toggled, 16))\n self._lst_handler_id.append(self.chkDevEnvQ18.connect('toggled',\n self._on_toggled, 17))\n self._lst_handler_id.append(self.chkDevEnvQ19.connect('toggled',\n self._on_toggled, 18))\n self._lst_handler_id.append(self.chkDevEnvQ20.connect('toggled',\n self._on_toggled, 19))\n self._lst_handler_id.append(self.chkDevEnvQ21.connect('toggled',\n self._on_toggled, 20))\n self._lst_handler_id.append(self.chkDevEnvQ22.connect('toggled',\n self._on_toggled, 21))\n self._lst_handler_id.append(self.chkDevEnvQ23.connect('toggled',\n self._on_toggled, 22))\n self._lst_handler_id.append(self.chkDevEnvQ24.connect('toggled',\n self._on_toggled, 23))\n self._lst_handler_id.append(self.chkDevEnvQ25.connect('toggled',\n self._on_toggled, 24))\n self._lst_handler_id.append(self.chkDevEnvQ26.connect('toggled',\n self._on_toggled, 25))\n self._lst_handler_id.append(self.chkDevEnvQ27.connect('toggled',\n self._on_toggled, 26))\n self._lst_handler_id.append(self.chkDevEnvQ28.connect('toggled',\n self._on_toggled, 27))\n self._lst_handler_id.append(self.chkDevEnvQ29.connect('toggled',\n self._on_toggled, 28))\n self._lst_handler_id.append(self.chkDevEnvQ30.connect('toggled',\n self._on_toggled, 29))\n self._lst_handler_id.append(self.chkDevEnvQ31.connect('toggled',\n self._on_toggled, 30))\n self._lst_handler_id.append(self.chkDevEnvQ32.connect('toggled',\n self._on_toggled, 31))\n self._lst_handler_id.append(self.chkDevEnvQ33.connect('toggled',\n self._on_toggled, 32))\n self._lst_handler_id.append(self.chkDevEnvQ34.connect('toggled',\n self._on_toggled, 33))\n self._lst_handler_id.append(self.chkDevEnvQ35.connect('toggled',\n self._on_toggled, 34))\n self._lst_handler_id.append(self.chkDevEnvQ36.connect('toggled',\n self._on_toggled, 35))\n self._lst_handler_id.append(self.chkDevEnvQ37.connect('toggled',\n self._on_toggled, 36))\n self._lst_handler_id.append(self.chkDevEnvQ38.connect('toggled',\n self._on_toggled, 37))\n self._lst_handler_id.append(self.chkDevEnvQ39.connect('toggled',\n self._on_toggled, 38))\n self._lst_handler_id.append(self.chkDevEnvQ40.connect('toggled',\n self._on_toggled, 39))\n self._lst_handler_id.append(self.chkDevEnvQ41.connect('toggled',\n self._on_toggled, 40))\n self._lst_handler_id.append(self.chkDevEnvQ42.connect('toggled',\n self._on_toggled, 41))\n self._lst_handler_id.append(self.chkDevEnvQ43.connect('toggled',\n self._on_toggled, 42))\n\n def create_risk_analysis_page(self, notebook):\n \"\"\"\n Method to create the development environment risk analysis page and add\n it to the risk analysis gtk.Notebook().\n\n :param gtk.Notebook notebook: the gtk.Notebook() instance that will\n hold the development environment risk\n analysis questions.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n _hpaned = gtk.HPaned()\n self.pack1(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = 
Widgets.make_frame(label=_(u'Organization'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u'1. There are separate design and coding organizations.'), _(\n u'2. There is an independent software test organization.'), _(\n u'3. There is an independent software quality assurance organization.'\n ), _(\n u'4. There is an independent software configuration management organization.'\n ), _(\n u'5. There is an independent software verification and validation organization.'\n ), _(\n u'6. A structured programming team will develop the software.'),\n _(\n u'7. The educational level of the software team members is above average.'\n ), _(\n u'8. The experience level of the software team members is above average.'\n )]\n _x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _x_pos += 125\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Methods'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(u'1. Standards are defined and will be enforced.'), _(\n u'2. Software will be developed using a higher order language.'\n ), _(\n u'3. The development process will include formal reviews (PDR, CDR, etc.).'\n ), _(\n u'4. The development process will include frequent walkthroughs.'\n ), _(\n u'5. Development will take a top-down and structured approach.'\n ), _(u'6. Unit development folders will be used.'), _(\n u'7. A software development library will be used.'), _(\n u'8. A formal change and error reporting process will be used.'\n ), _(u'9. Progress and status will routinely be reported.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\n _hpaned = gtk.HPaned()\n self.pack2(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Documentation'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u' 1. System requirements specifications will be documented.'),\n _(\n u' 2. Software requirements specifications will be documented.'\n ), _(u' 3. Interface design specifications will be documented.'\n ), _(u' 4. Software design specification will be documented.'),\n _(\n u' 5. 
Test plans, procedures, and reports will be documented.'),\n _(u' 6. The software development plan will be documented.'), _(\n u' 7. The software quality assurance plan will be documented.'),\n _(\n u' 8. The software configuration management plan will be documented.'\n ), _(u' 9. A requirements traceability matrix will be used.'),\n _(u'10. The software version description will be documented.'),\n _(u'11. All software discrepancies will be documented.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(\n u' 1. The software language requirements will be specified.'),\n _(u' 2. Formal program design language will be used.'), _(\n u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'\n ), _(u' 4. Simulation/emulation tools will be used.'), _(\n u' 5. Configuration management tools will be used.'), _(\n u' 6. A code auditing tool will be used.'), _(\n u' 7. A data flow analyzer will be used.'), _(\n u\" 8. A programmer's workbench will be used.\"), _(\n u' 9. Measurement tools will be used.'), _(\n u'10. Software code reviews will be used.'), _(\n u'11. Software branch testing will be used.'), _(\n u'12. Random testing will be used.'), _(\n u'13. Functional testing will be used.'), _(\n u'14. Error and anomaly detection testing will be used.'), _(\n u'15. 
Structure analysis will be used.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\n _label = gtk.Label()\n _label.set_markup(\"<span weight='bold'>\" + _(\n u'Development\\nEnvironment') + '</span>')\n _label.set_alignment(xalign=0.5, yalign=0.5)\n _label.set_justify(gtk.JUSTIFY_CENTER)\n _label.set_angle(0)\n _label.show_all()\n _label.set_tooltip_text(_(\n u'Assesses risk due to the development environment.'))\n notebook.insert_page(self, tab_label=_label, position=-1)\n return False\n\n def load(self, model):\n \"\"\"\n Method to load the Development Environment Risk Analysis answers.\n\n :param `rtk.software.Software` model: the Software data model to load\n the gtk.ToggleButton() from.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n self._software_model = model\n self.chkDevEnvQ1.set_active(model.lst_development[0])\n self.chkDevEnvQ2.set_active(model.lst_development[1])\n self.chkDevEnvQ3.set_active(model.lst_development[2])\n self.chkDevEnvQ4.set_active(model.lst_development[3])\n self.chkDevEnvQ5.set_active(model.lst_development[4])\n self.chkDevEnvQ6.set_active(model.lst_development[5])\n self.chkDevEnvQ7.set_active(model.lst_development[6])\n self.chkDevEnvQ8.set_active(model.lst_development[7])\n self.chkDevEnvQ9.set_active(model.lst_development[8])\n self.chkDevEnvQ10.set_active(model.lst_development[9])\n self.chkDevEnvQ11.set_active(model.lst_development[10])\n self.chkDevEnvQ12.set_active(model.lst_development[11])\n self.chkDevEnvQ13.set_active(model.lst_development[12])\n self.chkDevEnvQ14.set_active(model.lst_development[13])\n self.chkDevEnvQ15.set_active(model.lst_development[14])\n self.chkDevEnvQ16.set_active(model.lst_development[15])\n self.chkDevEnvQ17.set_active(model.lst_development[16])\n self.chkDevEnvQ18.set_active(model.lst_development[17])\n self.chkDevEnvQ19.set_active(model.lst_development[18])\n self.chkDevEnvQ20.set_active(model.lst_development[19])\n self.chkDevEnvQ21.set_active(model.lst_development[20])\n self.chkDevEnvQ22.set_active(model.lst_development[21])\n self.chkDevEnvQ23.set_active(model.lst_development[22])\n self.chkDevEnvQ24.set_active(model.lst_development[23])\n self.chkDevEnvQ25.set_active(model.lst_development[24])\n self.chkDevEnvQ26.set_active(model.lst_development[25])\n self.chkDevEnvQ27.set_active(model.lst_development[26])\n self.chkDevEnvQ28.set_active(model.lst_development[27])\n self.chkDevEnvQ29.set_active(model.lst_development[28])\n self.chkDevEnvQ30.set_active(model.lst_development[29])\n self.chkDevEnvQ31.set_active(model.lst_development[30])\n self.chkDevEnvQ32.set_active(model.lst_development[31])\n self.chkDevEnvQ33.set_active(model.lst_development[32])\n 
self.chkDevEnvQ34.set_active(model.lst_development[33])\n self.chkDevEnvQ35.set_active(model.lst_development[34])\n self.chkDevEnvQ36.set_active(model.lst_development[35])\n self.chkDevEnvQ37.set_active(model.lst_development[36])\n self.chkDevEnvQ38.set_active(model.lst_development[37])\n self.chkDevEnvQ39.set_active(model.lst_development[38])\n self.chkDevEnvQ40.set_active(model.lst_development[39])\n self.chkDevEnvQ41.set_active(model.lst_development[40])\n self.chkDevEnvQ42.set_active(model.lst_development[41])\n self.chkDevEnvQ43.set_active(model.lst_development[42])\n return False\n\n def _on_toggled(self, check, index):\n \"\"\"\n Callback method for gtk.CheckButton() 'toggled' event.\n\n :param gtk.CheckButton check: the gtk.CheckButton() that called this\n method.\n :param int index: the index of the Development Environment question\n associated with the gtk.CheckButton() that was\n toggled.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n check.handler_block(self._lst_handler_id[index])\n self._software_model.lst_development[index] = int(check.get_active())\n check.handler_unblock(self._lst_handler_id[index])\n return False\n",
"step-3": "<mask token>\ntry:\n import pygtk\n pygtk.require('2.0')\nexcept ImportError:\n sys.exit(1)\ntry:\n import gtk\nexcept ImportError:\n sys.exit(1)\ntry:\n import gtk.glade\nexcept ImportError:\n sys.exit(1)\ntry:\n import Configuration\n import gui.gtk.Widgets as Widgets\nexcept ImportError:\n import rtk.Configuration as Configuration\n import rtk.gui.gtk.Widgets as Widgets\n<mask token>\ntry:\n locale.setlocale(locale.LC_ALL, Configuration.LOCALE)\nexcept locale.Error:\n locale.setlocale(locale.LC_ALL, '')\n<mask token>\n\n\nclass RiskAnalysis(gtk.VPaned):\n \"\"\"\n The Work Book view for analyzing and displaying the risk associated with\n the development environment. The attributes of a development environment\n Work Book view are:\n\n :ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.\n :ivar _software_model: the :py:class:`rtk.software.Software.Model` to\n display.\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Method to initialize the development environment risk analysis\n questions Work Book page.\n \"\"\"\n gtk.VPaned.__init__(self)\n self._lst_handler_id = []\n self._software_model = None\n self.chkDevEnvQ1 = Widgets.make_check_button()\n self.chkDevEnvQ2 = Widgets.make_check_button()\n self.chkDevEnvQ3 = Widgets.make_check_button()\n self.chkDevEnvQ4 = Widgets.make_check_button()\n self.chkDevEnvQ5 = Widgets.make_check_button()\n self.chkDevEnvQ6 = Widgets.make_check_button()\n self.chkDevEnvQ7 = Widgets.make_check_button()\n self.chkDevEnvQ8 = Widgets.make_check_button()\n self.chkDevEnvQ9 = Widgets.make_check_button()\n self.chkDevEnvQ10 = Widgets.make_check_button()\n self.chkDevEnvQ11 = Widgets.make_check_button()\n self.chkDevEnvQ12 = Widgets.make_check_button()\n self.chkDevEnvQ13 = Widgets.make_check_button()\n self.chkDevEnvQ14 = Widgets.make_check_button()\n self.chkDevEnvQ15 = Widgets.make_check_button()\n self.chkDevEnvQ16 = Widgets.make_check_button()\n self.chkDevEnvQ17 = Widgets.make_check_button()\n self.chkDevEnvQ18 = Widgets.make_check_button()\n self.chkDevEnvQ19 = Widgets.make_check_button()\n self.chkDevEnvQ20 = Widgets.make_check_button()\n self.chkDevEnvQ21 = Widgets.make_check_button()\n self.chkDevEnvQ22 = Widgets.make_check_button()\n self.chkDevEnvQ23 = Widgets.make_check_button()\n self.chkDevEnvQ24 = Widgets.make_check_button()\n self.chkDevEnvQ25 = Widgets.make_check_button()\n self.chkDevEnvQ26 = Widgets.make_check_button()\n self.chkDevEnvQ27 = Widgets.make_check_button()\n self.chkDevEnvQ28 = Widgets.make_check_button()\n self.chkDevEnvQ29 = Widgets.make_check_button()\n self.chkDevEnvQ30 = Widgets.make_check_button()\n self.chkDevEnvQ31 = Widgets.make_check_button()\n self.chkDevEnvQ32 = Widgets.make_check_button()\n self.chkDevEnvQ33 = Widgets.make_check_button()\n self.chkDevEnvQ34 = Widgets.make_check_button()\n self.chkDevEnvQ35 = Widgets.make_check_button()\n self.chkDevEnvQ36 = Widgets.make_check_button()\n self.chkDevEnvQ37 = Widgets.make_check_button()\n self.chkDevEnvQ38 = Widgets.make_check_button()\n self.chkDevEnvQ39 = Widgets.make_check_button()\n self.chkDevEnvQ40 = Widgets.make_check_button()\n self.chkDevEnvQ41 = Widgets.make_check_button()\n self.chkDevEnvQ42 = Widgets.make_check_button()\n self.chkDevEnvQ43 = Widgets.make_check_button()\n self._lst_handler_id.append(self.chkDevEnvQ1.connect('toggled',\n self._on_toggled, 0))\n self._lst_handler_id.append(self.chkDevEnvQ2.connect('toggled',\n self._on_toggled, 1))\n self._lst_handler_id.append(self.chkDevEnvQ3.connect('toggled',\n self._on_toggled, 
2))\n self._lst_handler_id.append(self.chkDevEnvQ4.connect('toggled',\n self._on_toggled, 3))\n self._lst_handler_id.append(self.chkDevEnvQ5.connect('toggled',\n self._on_toggled, 4))\n self._lst_handler_id.append(self.chkDevEnvQ6.connect('toggled',\n self._on_toggled, 5))\n self._lst_handler_id.append(self.chkDevEnvQ7.connect('toggled',\n self._on_toggled, 6))\n self._lst_handler_id.append(self.chkDevEnvQ8.connect('toggled',\n self._on_toggled, 7))\n self._lst_handler_id.append(self.chkDevEnvQ9.connect('toggled',\n self._on_toggled, 8))\n self._lst_handler_id.append(self.chkDevEnvQ10.connect('toggled',\n self._on_toggled, 9))\n self._lst_handler_id.append(self.chkDevEnvQ11.connect('toggled',\n self._on_toggled, 10))\n self._lst_handler_id.append(self.chkDevEnvQ12.connect('toggled',\n self._on_toggled, 11))\n self._lst_handler_id.append(self.chkDevEnvQ13.connect('toggled',\n self._on_toggled, 12))\n self._lst_handler_id.append(self.chkDevEnvQ14.connect('toggled',\n self._on_toggled, 13))\n self._lst_handler_id.append(self.chkDevEnvQ15.connect('toggled',\n self._on_toggled, 14))\n self._lst_handler_id.append(self.chkDevEnvQ16.connect('toggled',\n self._on_toggled, 15))\n self._lst_handler_id.append(self.chkDevEnvQ17.connect('toggled',\n self._on_toggled, 16))\n self._lst_handler_id.append(self.chkDevEnvQ18.connect('toggled',\n self._on_toggled, 17))\n self._lst_handler_id.append(self.chkDevEnvQ19.connect('toggled',\n self._on_toggled, 18))\n self._lst_handler_id.append(self.chkDevEnvQ20.connect('toggled',\n self._on_toggled, 19))\n self._lst_handler_id.append(self.chkDevEnvQ21.connect('toggled',\n self._on_toggled, 20))\n self._lst_handler_id.append(self.chkDevEnvQ22.connect('toggled',\n self._on_toggled, 21))\n self._lst_handler_id.append(self.chkDevEnvQ23.connect('toggled',\n self._on_toggled, 22))\n self._lst_handler_id.append(self.chkDevEnvQ24.connect('toggled',\n self._on_toggled, 23))\n self._lst_handler_id.append(self.chkDevEnvQ25.connect('toggled',\n self._on_toggled, 24))\n self._lst_handler_id.append(self.chkDevEnvQ26.connect('toggled',\n self._on_toggled, 25))\n self._lst_handler_id.append(self.chkDevEnvQ27.connect('toggled',\n self._on_toggled, 26))\n self._lst_handler_id.append(self.chkDevEnvQ28.connect('toggled',\n self._on_toggled, 27))\n self._lst_handler_id.append(self.chkDevEnvQ29.connect('toggled',\n self._on_toggled, 28))\n self._lst_handler_id.append(self.chkDevEnvQ30.connect('toggled',\n self._on_toggled, 29))\n self._lst_handler_id.append(self.chkDevEnvQ31.connect('toggled',\n self._on_toggled, 30))\n self._lst_handler_id.append(self.chkDevEnvQ32.connect('toggled',\n self._on_toggled, 31))\n self._lst_handler_id.append(self.chkDevEnvQ33.connect('toggled',\n self._on_toggled, 32))\n self._lst_handler_id.append(self.chkDevEnvQ34.connect('toggled',\n self._on_toggled, 33))\n self._lst_handler_id.append(self.chkDevEnvQ35.connect('toggled',\n self._on_toggled, 34))\n self._lst_handler_id.append(self.chkDevEnvQ36.connect('toggled',\n self._on_toggled, 35))\n self._lst_handler_id.append(self.chkDevEnvQ37.connect('toggled',\n self._on_toggled, 36))\n self._lst_handler_id.append(self.chkDevEnvQ38.connect('toggled',\n self._on_toggled, 37))\n self._lst_handler_id.append(self.chkDevEnvQ39.connect('toggled',\n self._on_toggled, 38))\n self._lst_handler_id.append(self.chkDevEnvQ40.connect('toggled',\n self._on_toggled, 39))\n self._lst_handler_id.append(self.chkDevEnvQ41.connect('toggled',\n self._on_toggled, 40))\n 
self._lst_handler_id.append(self.chkDevEnvQ42.connect('toggled',\n self._on_toggled, 41))\n self._lst_handler_id.append(self.chkDevEnvQ43.connect('toggled',\n self._on_toggled, 42))\n\n def create_risk_analysis_page(self, notebook):\n \"\"\"\n Method to create the development environment risk analysis page and add\n it to the risk analysis gtk.Notebook().\n\n :param gtk.Notebook notebook: the gtk.Notebook() instance that will\n hold the development environment risk\n analysis questions.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n _hpaned = gtk.HPaned()\n self.pack1(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Organization'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u'1. There are separate design and coding organizations.'), _(\n u'2. There is an independent software test organization.'), _(\n u'3. There is an independent software quality assurance organization.'\n ), _(\n u'4. There is an independent software configuration management organization.'\n ), _(\n u'5. There is an independent software verification and validation organization.'\n ), _(\n u'6. A structured programming team will develop the software.'),\n _(\n u'7. The educational level of the software team members is above average.'\n ), _(\n u'8. The experience level of the software team members is above average.'\n )]\n _x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _x_pos += 125\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Methods'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(u'1. Standards are defined and will be enforced.'), _(\n u'2. Software will be developed using a higher order language.'\n ), _(\n u'3. The development process will include formal reviews (PDR, CDR, etc.).'\n ), _(\n u'4. The development process will include frequent walkthroughs.'\n ), _(\n u'5. Development will take a top-down and structured approach.'\n ), _(u'6. Unit development folders will be used.'), _(\n u'7. A software development library will be used.'), _(\n u'8. A formal change and error reporting process will be used.'\n ), _(u'9. 
Progress and status will routinely be reported.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\n _hpaned = gtk.HPaned()\n self.pack2(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Documentation'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u' 1. System requirements specifications will be documented.'),\n _(\n u' 2. Software requirements specifications will be documented.'\n ), _(u' 3. Interface design specifications will be documented.'\n ), _(u' 4. Software design specification will be documented.'),\n _(\n u' 5. Test plans, procedures, and reports will be documented.'),\n _(u' 6. The software development plan will be documented.'), _(\n u' 7. The software quality assurance plan will be documented.'),\n _(\n u' 8. The software configuration management plan will be documented.'\n ), _(u' 9. A requirements traceability matrix will be used.'),\n _(u'10. The software version description will be documented.'),\n _(u'11. All software discrepancies will be documented.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(\n u' 1. The software language requirements will be specified.'),\n _(u' 2. Formal program design language will be used.'), _(\n u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'\n ), _(u' 4. Simulation/emulation tools will be used.'), _(\n u' 5. Configuration management tools will be used.'), _(\n u' 6. A code auditing tool will be used.'), _(\n u' 7. A data flow analyzer will be used.'), _(\n u\" 8. A programmer's workbench will be used.\"), _(\n u' 9. Measurement tools will be used.'), _(\n u'10. Software code reviews will be used.'), _(\n u'11. Software branch testing will be used.'), _(\n u'12. Random testing will be used.'), _(\n u'13. Functional testing will be used.'), _(\n u'14. Error and anomaly detection testing will be used.'), _(\n u'15. 
Structure analysis will be used.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\n _label = gtk.Label()\n _label.set_markup(\"<span weight='bold'>\" + _(\n u'Development\\nEnvironment') + '</span>')\n _label.set_alignment(xalign=0.5, yalign=0.5)\n _label.set_justify(gtk.JUSTIFY_CENTER)\n _label.set_angle(0)\n _label.show_all()\n _label.set_tooltip_text(_(\n u'Assesses risk due to the development environment.'))\n notebook.insert_page(self, tab_label=_label, position=-1)\n return False\n\n def load(self, model):\n \"\"\"\n Method to load the Development Environment Risk Analysis answers.\n\n :param `rtk.software.Software` model: the Software data model to load\n the gtk.ToggleButton() from.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n self._software_model = model\n self.chkDevEnvQ1.set_active(model.lst_development[0])\n self.chkDevEnvQ2.set_active(model.lst_development[1])\n self.chkDevEnvQ3.set_active(model.lst_development[2])\n self.chkDevEnvQ4.set_active(model.lst_development[3])\n self.chkDevEnvQ5.set_active(model.lst_development[4])\n self.chkDevEnvQ6.set_active(model.lst_development[5])\n self.chkDevEnvQ7.set_active(model.lst_development[6])\n self.chkDevEnvQ8.set_active(model.lst_development[7])\n self.chkDevEnvQ9.set_active(model.lst_development[8])\n self.chkDevEnvQ10.set_active(model.lst_development[9])\n self.chkDevEnvQ11.set_active(model.lst_development[10])\n self.chkDevEnvQ12.set_active(model.lst_development[11])\n self.chkDevEnvQ13.set_active(model.lst_development[12])\n self.chkDevEnvQ14.set_active(model.lst_development[13])\n self.chkDevEnvQ15.set_active(model.lst_development[14])\n self.chkDevEnvQ16.set_active(model.lst_development[15])\n self.chkDevEnvQ17.set_active(model.lst_development[16])\n self.chkDevEnvQ18.set_active(model.lst_development[17])\n self.chkDevEnvQ19.set_active(model.lst_development[18])\n self.chkDevEnvQ20.set_active(model.lst_development[19])\n self.chkDevEnvQ21.set_active(model.lst_development[20])\n self.chkDevEnvQ22.set_active(model.lst_development[21])\n self.chkDevEnvQ23.set_active(model.lst_development[22])\n self.chkDevEnvQ24.set_active(model.lst_development[23])\n self.chkDevEnvQ25.set_active(model.lst_development[24])\n self.chkDevEnvQ26.set_active(model.lst_development[25])\n self.chkDevEnvQ27.set_active(model.lst_development[26])\n self.chkDevEnvQ28.set_active(model.lst_development[27])\n self.chkDevEnvQ29.set_active(model.lst_development[28])\n self.chkDevEnvQ30.set_active(model.lst_development[29])\n self.chkDevEnvQ31.set_active(model.lst_development[30])\n self.chkDevEnvQ32.set_active(model.lst_development[31])\n self.chkDevEnvQ33.set_active(model.lst_development[32])\n 
self.chkDevEnvQ34.set_active(model.lst_development[33])\n self.chkDevEnvQ35.set_active(model.lst_development[34])\n self.chkDevEnvQ36.set_active(model.lst_development[35])\n self.chkDevEnvQ37.set_active(model.lst_development[36])\n self.chkDevEnvQ38.set_active(model.lst_development[37])\n self.chkDevEnvQ39.set_active(model.lst_development[38])\n self.chkDevEnvQ40.set_active(model.lst_development[39])\n self.chkDevEnvQ41.set_active(model.lst_development[40])\n self.chkDevEnvQ42.set_active(model.lst_development[41])\n self.chkDevEnvQ43.set_active(model.lst_development[42])\n return False\n\n def _on_toggled(self, check, index):\n \"\"\"\n Callback method for gtk.CheckButton() 'toggled' event.\n\n :param gtk.CheckButton check: the gtk.CheckButton() that called this\n method.\n :param int index: the index of the Development Environment question\n associated with the gtk.CheckButton() that was\n toggled.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n check.handler_block(self._lst_handler_id[index])\n self._software_model.lst_development[index] = int(check.get_active())\n check.handler_unblock(self._lst_handler_id[index])\n return False\n",
"step-4": "<mask token>\nimport sys\nimport gettext\nimport locale\ntry:\n import pygtk\n pygtk.require('2.0')\nexcept ImportError:\n sys.exit(1)\ntry:\n import gtk\nexcept ImportError:\n sys.exit(1)\ntry:\n import gtk.glade\nexcept ImportError:\n sys.exit(1)\ntry:\n import Configuration\n import gui.gtk.Widgets as Widgets\nexcept ImportError:\n import rtk.Configuration as Configuration\n import rtk.gui.gtk.Widgets as Widgets\n__author__ = 'Andrew Rowland'\n__email__ = '[email protected]'\n__organization__ = 'ReliaQual Associates, LLC'\n__copyright__ = 'Copyright 2007 - 2015 Andrew \"weibullguy\" Rowland'\ntry:\n locale.setlocale(locale.LC_ALL, Configuration.LOCALE)\nexcept locale.Error:\n locale.setlocale(locale.LC_ALL, '')\n_ = gettext.gettext\n\n\nclass RiskAnalysis(gtk.VPaned):\n \"\"\"\n The Work Book view for analyzing and displaying the risk associated with\n the development environment. The attributes of a development environment\n Work Book view are:\n\n :ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.\n :ivar _software_model: the :py:class:`rtk.software.Software.Model` to\n display.\n \"\"\"\n\n def __init__(self):\n \"\"\"\n Method to initialize the development environment risk analysis\n questions Work Book page.\n \"\"\"\n gtk.VPaned.__init__(self)\n self._lst_handler_id = []\n self._software_model = None\n self.chkDevEnvQ1 = Widgets.make_check_button()\n self.chkDevEnvQ2 = Widgets.make_check_button()\n self.chkDevEnvQ3 = Widgets.make_check_button()\n self.chkDevEnvQ4 = Widgets.make_check_button()\n self.chkDevEnvQ5 = Widgets.make_check_button()\n self.chkDevEnvQ6 = Widgets.make_check_button()\n self.chkDevEnvQ7 = Widgets.make_check_button()\n self.chkDevEnvQ8 = Widgets.make_check_button()\n self.chkDevEnvQ9 = Widgets.make_check_button()\n self.chkDevEnvQ10 = Widgets.make_check_button()\n self.chkDevEnvQ11 = Widgets.make_check_button()\n self.chkDevEnvQ12 = Widgets.make_check_button()\n self.chkDevEnvQ13 = Widgets.make_check_button()\n self.chkDevEnvQ14 = Widgets.make_check_button()\n self.chkDevEnvQ15 = Widgets.make_check_button()\n self.chkDevEnvQ16 = Widgets.make_check_button()\n self.chkDevEnvQ17 = Widgets.make_check_button()\n self.chkDevEnvQ18 = Widgets.make_check_button()\n self.chkDevEnvQ19 = Widgets.make_check_button()\n self.chkDevEnvQ20 = Widgets.make_check_button()\n self.chkDevEnvQ21 = Widgets.make_check_button()\n self.chkDevEnvQ22 = Widgets.make_check_button()\n self.chkDevEnvQ23 = Widgets.make_check_button()\n self.chkDevEnvQ24 = Widgets.make_check_button()\n self.chkDevEnvQ25 = Widgets.make_check_button()\n self.chkDevEnvQ26 = Widgets.make_check_button()\n self.chkDevEnvQ27 = Widgets.make_check_button()\n self.chkDevEnvQ28 = Widgets.make_check_button()\n self.chkDevEnvQ29 = Widgets.make_check_button()\n self.chkDevEnvQ30 = Widgets.make_check_button()\n self.chkDevEnvQ31 = Widgets.make_check_button()\n self.chkDevEnvQ32 = Widgets.make_check_button()\n self.chkDevEnvQ33 = Widgets.make_check_button()\n self.chkDevEnvQ34 = Widgets.make_check_button()\n self.chkDevEnvQ35 = Widgets.make_check_button()\n self.chkDevEnvQ36 = Widgets.make_check_button()\n self.chkDevEnvQ37 = Widgets.make_check_button()\n self.chkDevEnvQ38 = Widgets.make_check_button()\n self.chkDevEnvQ39 = Widgets.make_check_button()\n self.chkDevEnvQ40 = Widgets.make_check_button()\n self.chkDevEnvQ41 = Widgets.make_check_button()\n self.chkDevEnvQ42 = Widgets.make_check_button()\n self.chkDevEnvQ43 = Widgets.make_check_button()\n 
self._lst_handler_id.append(self.chkDevEnvQ1.connect('toggled',\n self._on_toggled, 0))\n self._lst_handler_id.append(self.chkDevEnvQ2.connect('toggled',\n self._on_toggled, 1))\n self._lst_handler_id.append(self.chkDevEnvQ3.connect('toggled',\n self._on_toggled, 2))\n self._lst_handler_id.append(self.chkDevEnvQ4.connect('toggled',\n self._on_toggled, 3))\n self._lst_handler_id.append(self.chkDevEnvQ5.connect('toggled',\n self._on_toggled, 4))\n self._lst_handler_id.append(self.chkDevEnvQ6.connect('toggled',\n self._on_toggled, 5))\n self._lst_handler_id.append(self.chkDevEnvQ7.connect('toggled',\n self._on_toggled, 6))\n self._lst_handler_id.append(self.chkDevEnvQ8.connect('toggled',\n self._on_toggled, 7))\n self._lst_handler_id.append(self.chkDevEnvQ9.connect('toggled',\n self._on_toggled, 8))\n self._lst_handler_id.append(self.chkDevEnvQ10.connect('toggled',\n self._on_toggled, 9))\n self._lst_handler_id.append(self.chkDevEnvQ11.connect('toggled',\n self._on_toggled, 10))\n self._lst_handler_id.append(self.chkDevEnvQ12.connect('toggled',\n self._on_toggled, 11))\n self._lst_handler_id.append(self.chkDevEnvQ13.connect('toggled',\n self._on_toggled, 12))\n self._lst_handler_id.append(self.chkDevEnvQ14.connect('toggled',\n self._on_toggled, 13))\n self._lst_handler_id.append(self.chkDevEnvQ15.connect('toggled',\n self._on_toggled, 14))\n self._lst_handler_id.append(self.chkDevEnvQ16.connect('toggled',\n self._on_toggled, 15))\n self._lst_handler_id.append(self.chkDevEnvQ17.connect('toggled',\n self._on_toggled, 16))\n self._lst_handler_id.append(self.chkDevEnvQ18.connect('toggled',\n self._on_toggled, 17))\n self._lst_handler_id.append(self.chkDevEnvQ19.connect('toggled',\n self._on_toggled, 18))\n self._lst_handler_id.append(self.chkDevEnvQ20.connect('toggled',\n self._on_toggled, 19))\n self._lst_handler_id.append(self.chkDevEnvQ21.connect('toggled',\n self._on_toggled, 20))\n self._lst_handler_id.append(self.chkDevEnvQ22.connect('toggled',\n self._on_toggled, 21))\n self._lst_handler_id.append(self.chkDevEnvQ23.connect('toggled',\n self._on_toggled, 22))\n self._lst_handler_id.append(self.chkDevEnvQ24.connect('toggled',\n self._on_toggled, 23))\n self._lst_handler_id.append(self.chkDevEnvQ25.connect('toggled',\n self._on_toggled, 24))\n self._lst_handler_id.append(self.chkDevEnvQ26.connect('toggled',\n self._on_toggled, 25))\n self._lst_handler_id.append(self.chkDevEnvQ27.connect('toggled',\n self._on_toggled, 26))\n self._lst_handler_id.append(self.chkDevEnvQ28.connect('toggled',\n self._on_toggled, 27))\n self._lst_handler_id.append(self.chkDevEnvQ29.connect('toggled',\n self._on_toggled, 28))\n self._lst_handler_id.append(self.chkDevEnvQ30.connect('toggled',\n self._on_toggled, 29))\n self._lst_handler_id.append(self.chkDevEnvQ31.connect('toggled',\n self._on_toggled, 30))\n self._lst_handler_id.append(self.chkDevEnvQ32.connect('toggled',\n self._on_toggled, 31))\n self._lst_handler_id.append(self.chkDevEnvQ33.connect('toggled',\n self._on_toggled, 32))\n self._lst_handler_id.append(self.chkDevEnvQ34.connect('toggled',\n self._on_toggled, 33))\n self._lst_handler_id.append(self.chkDevEnvQ35.connect('toggled',\n self._on_toggled, 34))\n self._lst_handler_id.append(self.chkDevEnvQ36.connect('toggled',\n self._on_toggled, 35))\n self._lst_handler_id.append(self.chkDevEnvQ37.connect('toggled',\n self._on_toggled, 36))\n self._lst_handler_id.append(self.chkDevEnvQ38.connect('toggled',\n self._on_toggled, 37))\n self._lst_handler_id.append(self.chkDevEnvQ39.connect('toggled',\n 
self._on_toggled, 38))\n self._lst_handler_id.append(self.chkDevEnvQ40.connect('toggled',\n self._on_toggled, 39))\n self._lst_handler_id.append(self.chkDevEnvQ41.connect('toggled',\n self._on_toggled, 40))\n self._lst_handler_id.append(self.chkDevEnvQ42.connect('toggled',\n self._on_toggled, 41))\n self._lst_handler_id.append(self.chkDevEnvQ43.connect('toggled',\n self._on_toggled, 42))\n\n def create_risk_analysis_page(self, notebook):\n \"\"\"\n Method to create the development environment risk analysis page and add\n it to the risk analysis gtk.Notebook().\n\n :param gtk.Notebook notebook: the gtk.Notebook() instance that will\n hold the development environment risk\n analysis questions.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n _hpaned = gtk.HPaned()\n self.pack1(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Organization'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u'1. There are separate design and coding organizations.'), _(\n u'2. There is an independent software test organization.'), _(\n u'3. There is an independent software quality assurance organization.'\n ), _(\n u'4. There is an independent software configuration management organization.'\n ), _(\n u'5. There is an independent software verification and validation organization.'\n ), _(\n u'6. A structured programming team will develop the software.'),\n _(\n u'7. The educational level of the software team members is above average.'\n ), _(\n u'8. The experience level of the software team members is above average.'\n )]\n _x_pos, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _x_pos += 125\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Methods'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(u'1. Standards are defined and will be enforced.'), _(\n u'2. Software will be developed using a higher order language.'\n ), _(\n u'3. The development process will include formal reviews (PDR, CDR, etc.).'\n ), _(\n u'4. The development process will include frequent walkthroughs.'\n ), _(\n u'5. Development will take a top-down and structured approach.'\n ), _(u'6. Unit development folders will be used.'), _(\n u'7. A software development library will be used.'), _(\n u'8. A formal change and error reporting process will be used.'\n ), _(u'9. 
Progress and status will routinely be reported.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\n _hpaned = gtk.HPaned()\n self.pack2(_hpaned, resize=True, shrink=True)\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Documentation'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack1(_frame, True, True)\n _labels = [_(\n u' 1. System requirements specifications will be documented.'),\n _(\n u' 2. Software requirements specifications will be documented.'\n ), _(u' 3. Interface design specifications will be documented.'\n ), _(u' 4. Software design specification will be documented.'),\n _(\n u' 5. Test plans, procedures, and reports will be documented.'),\n _(u' 6. The software development plan will be documented.'), _(\n u' 7. The software quality assurance plan will be documented.'),\n _(\n u' 8. The software configuration management plan will be documented.'\n ), _(u' 9. A requirements traceability matrix will be used.'),\n _(u'10. The software version description will be documented.'),\n _(u'11. All software discrepancies will be documented.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\n _fixed = gtk.Fixed()\n _scrollwindow = gtk.ScrolledWindow()\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\n _scrollwindow.add_with_viewport(_fixed)\n _frame = Widgets.make_frame(label=_(u'Tools & Test Techniques'))\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\n _frame.add(_scrollwindow)\n _hpaned.pack2(_frame, True, True)\n _labels = [_(\n u' 1. The software language requirements will be specified.'),\n _(u' 2. Formal program design language will be used.'), _(\n u' 3. Program design graphical techniques (flowcharts, HIPO, etc.) will be used.'\n ), _(u' 4. Simulation/emulation tools will be used.'), _(\n u' 5. Configuration management tools will be used.'), _(\n u' 6. A code auditing tool will be used.'), _(\n u' 7. A data flow analyzer will be used.'), _(\n u\" 8. A programmer's workbench will be used.\"), _(\n u' 9. Measurement tools will be used.'), _(\n u'10. Software code reviews will be used.'), _(\n u'11. Software branch testing will be used.'), _(\n u'12. Random testing will be used.'), _(\n u'13. Functional testing will be used.'), _(\n u'14. Error and anomaly detection testing will be used.'), _(\n u'15. 
Structure analysis will be used.')]\n __, _y_pos = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\n _label = gtk.Label()\n _label.set_markup(\"<span weight='bold'>\" + _(\n u'Development\\nEnvironment') + '</span>')\n _label.set_alignment(xalign=0.5, yalign=0.5)\n _label.set_justify(gtk.JUSTIFY_CENTER)\n _label.set_angle(0)\n _label.show_all()\n _label.set_tooltip_text(_(\n u'Assesses risk due to the development environment.'))\n notebook.insert_page(self, tab_label=_label, position=-1)\n return False\n\n def load(self, model):\n \"\"\"\n Method to load the Development Environment Risk Analysis answers.\n\n :param `rtk.software.Software` model: the Software data model to load\n the gtk.ToggleButton() from.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n self._software_model = model\n self.chkDevEnvQ1.set_active(model.lst_development[0])\n self.chkDevEnvQ2.set_active(model.lst_development[1])\n self.chkDevEnvQ3.set_active(model.lst_development[2])\n self.chkDevEnvQ4.set_active(model.lst_development[3])\n self.chkDevEnvQ5.set_active(model.lst_development[4])\n self.chkDevEnvQ6.set_active(model.lst_development[5])\n self.chkDevEnvQ7.set_active(model.lst_development[6])\n self.chkDevEnvQ8.set_active(model.lst_development[7])\n self.chkDevEnvQ9.set_active(model.lst_development[8])\n self.chkDevEnvQ10.set_active(model.lst_development[9])\n self.chkDevEnvQ11.set_active(model.lst_development[10])\n self.chkDevEnvQ12.set_active(model.lst_development[11])\n self.chkDevEnvQ13.set_active(model.lst_development[12])\n self.chkDevEnvQ14.set_active(model.lst_development[13])\n self.chkDevEnvQ15.set_active(model.lst_development[14])\n self.chkDevEnvQ16.set_active(model.lst_development[15])\n self.chkDevEnvQ17.set_active(model.lst_development[16])\n self.chkDevEnvQ18.set_active(model.lst_development[17])\n self.chkDevEnvQ19.set_active(model.lst_development[18])\n self.chkDevEnvQ20.set_active(model.lst_development[19])\n self.chkDevEnvQ21.set_active(model.lst_development[20])\n self.chkDevEnvQ22.set_active(model.lst_development[21])\n self.chkDevEnvQ23.set_active(model.lst_development[22])\n self.chkDevEnvQ24.set_active(model.lst_development[23])\n self.chkDevEnvQ25.set_active(model.lst_development[24])\n self.chkDevEnvQ26.set_active(model.lst_development[25])\n self.chkDevEnvQ27.set_active(model.lst_development[26])\n self.chkDevEnvQ28.set_active(model.lst_development[27])\n self.chkDevEnvQ29.set_active(model.lst_development[28])\n self.chkDevEnvQ30.set_active(model.lst_development[29])\n self.chkDevEnvQ31.set_active(model.lst_development[30])\n self.chkDevEnvQ32.set_active(model.lst_development[31])\n self.chkDevEnvQ33.set_active(model.lst_development[32])\n 
self.chkDevEnvQ34.set_active(model.lst_development[33])\n self.chkDevEnvQ35.set_active(model.lst_development[34])\n self.chkDevEnvQ36.set_active(model.lst_development[35])\n self.chkDevEnvQ37.set_active(model.lst_development[36])\n self.chkDevEnvQ38.set_active(model.lst_development[37])\n self.chkDevEnvQ39.set_active(model.lst_development[38])\n self.chkDevEnvQ40.set_active(model.lst_development[39])\n self.chkDevEnvQ41.set_active(model.lst_development[40])\n self.chkDevEnvQ42.set_active(model.lst_development[41])\n self.chkDevEnvQ43.set_active(model.lst_development[42])\n return False\n\n def _on_toggled(self, check, index):\n \"\"\"\n Callback method for gtk.CheckButton() 'toggled' event.\n\n :param gtk.CheckButton check: the gtk.CheckButton() that called this\n method.\n :param int index: the index of the Development Environment question\n associated with the gtk.CheckButton() that was\n toggled.\n :return: False if successful or True if an error is encountered.\n :rtype: bool\n \"\"\"\n check.handler_block(self._lst_handler_id[index])\n self._software_model.lst_development[index] = int(check.get_active())\n check.handler_unblock(self._lst_handler_id[index])\n return False\n",
"step-5": "#!/usr/bin/env python\r\n\"\"\"\r\n##############################################################################\r\nSoftware Package Risk Analysis Development Environment Specific Work Book View\r\n##############################################################################\r\n\"\"\"\r\n\r\n# -*- coding: utf-8 -*-\r\n#\r\n# rtk.software.__gui.gtk.DevelopmentEnvironment.py is part of The RTK\r\n# Project\r\n#\r\n# All rights reserved.\r\n\r\nimport sys\r\n\r\n# Import modules for localization support.\r\nimport gettext\r\nimport locale\r\n\r\n# Modules required for the GUI.\r\ntry:\r\n import pygtk\r\n pygtk.require('2.0')\r\nexcept ImportError:\r\n sys.exit(1)\r\ntry:\r\n import gtk\r\nexcept ImportError:\r\n sys.exit(1)\r\ntry:\r\n import gtk.glade\r\nexcept ImportError:\r\n sys.exit(1)\r\n\r\n# Import other RTK modules.\r\ntry:\r\n import Configuration\r\n import gui.gtk.Widgets as Widgets\r\nexcept ImportError:\r\n import rtk.Configuration as Configuration\r\n import rtk.gui.gtk.Widgets as Widgets\r\n\r\n__author__ = 'Andrew Rowland'\r\n__email__ = '[email protected]'\r\n__organization__ = 'ReliaQual Associates, LLC'\r\n__copyright__ = 'Copyright 2007 - 2015 Andrew \"weibullguy\" Rowland'\r\n\r\ntry:\r\n locale.setlocale(locale.LC_ALL, Configuration.LOCALE)\r\nexcept locale.Error:\r\n locale.setlocale(locale.LC_ALL, '')\r\n\r\n_ = gettext.gettext\r\n\r\n\r\nclass RiskAnalysis(gtk.VPaned):\r\n \"\"\"\r\n The Work Book view for analyzing and displaying the risk associated with\r\n the development environment. The attributes of a development environment\r\n Work Book view are:\r\n\r\n :ivar list _lst_handler_id: the list of gtk.Widget() signal handler IDs.\r\n :ivar _software_model: the :py:class:`rtk.software.Software.Model` to\r\n display.\r\n \"\"\"\r\n\r\n def __init__(self):\r\n \"\"\"\r\n Method to initialize the development environment risk analysis\r\n questions Work Book page.\r\n \"\"\"\r\n\r\n gtk.VPaned.__init__(self)\r\n\r\n # Define private dictionary attributes.\r\n\r\n # Define private list attributes.\r\n self._lst_handler_id = []\r\n\r\n # Define private scalar attributes.\r\n self._software_model = None\r\n\r\n # Define public dictionary attributes.\r\n\r\n # Define public list attributes.\r\n\r\n # Define public scalar attributes.\r\n self.chkDevEnvQ1 = Widgets.make_check_button()\r\n self.chkDevEnvQ2 = Widgets.make_check_button()\r\n self.chkDevEnvQ3 = Widgets.make_check_button()\r\n self.chkDevEnvQ4 = Widgets.make_check_button()\r\n self.chkDevEnvQ5 = Widgets.make_check_button()\r\n self.chkDevEnvQ6 = Widgets.make_check_button()\r\n self.chkDevEnvQ7 = Widgets.make_check_button()\r\n self.chkDevEnvQ8 = Widgets.make_check_button()\r\n self.chkDevEnvQ9 = Widgets.make_check_button()\r\n self.chkDevEnvQ10 = Widgets.make_check_button()\r\n self.chkDevEnvQ11 = Widgets.make_check_button()\r\n self.chkDevEnvQ12 = Widgets.make_check_button()\r\n self.chkDevEnvQ13 = Widgets.make_check_button()\r\n self.chkDevEnvQ14 = Widgets.make_check_button()\r\n self.chkDevEnvQ15 = Widgets.make_check_button()\r\n self.chkDevEnvQ16 = Widgets.make_check_button()\r\n self.chkDevEnvQ17 = Widgets.make_check_button()\r\n self.chkDevEnvQ18 = Widgets.make_check_button()\r\n self.chkDevEnvQ19 = Widgets.make_check_button()\r\n self.chkDevEnvQ20 = Widgets.make_check_button()\r\n self.chkDevEnvQ21 = Widgets.make_check_button()\r\n self.chkDevEnvQ22 = Widgets.make_check_button()\r\n self.chkDevEnvQ23 = Widgets.make_check_button()\r\n self.chkDevEnvQ24 = Widgets.make_check_button()\r\n 
self.chkDevEnvQ25 = Widgets.make_check_button()\r\n self.chkDevEnvQ26 = Widgets.make_check_button()\r\n self.chkDevEnvQ27 = Widgets.make_check_button()\r\n self.chkDevEnvQ28 = Widgets.make_check_button()\r\n self.chkDevEnvQ29 = Widgets.make_check_button()\r\n self.chkDevEnvQ30 = Widgets.make_check_button()\r\n self.chkDevEnvQ31 = Widgets.make_check_button()\r\n self.chkDevEnvQ32 = Widgets.make_check_button()\r\n self.chkDevEnvQ33 = Widgets.make_check_button()\r\n self.chkDevEnvQ34 = Widgets.make_check_button()\r\n self.chkDevEnvQ35 = Widgets.make_check_button()\r\n self.chkDevEnvQ36 = Widgets.make_check_button()\r\n self.chkDevEnvQ37 = Widgets.make_check_button()\r\n self.chkDevEnvQ38 = Widgets.make_check_button()\r\n self.chkDevEnvQ39 = Widgets.make_check_button()\r\n self.chkDevEnvQ40 = Widgets.make_check_button()\r\n self.chkDevEnvQ41 = Widgets.make_check_button()\r\n self.chkDevEnvQ42 = Widgets.make_check_button()\r\n self.chkDevEnvQ43 = Widgets.make_check_button()\r\n\r\n # Connect gtk.Widget() signals to callback methods.\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ1.connect('toggled', self._on_toggled, 0))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ2.connect('toggled', self._on_toggled, 1))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ3.connect('toggled', self._on_toggled, 2))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ4.connect('toggled', self._on_toggled, 3))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ5.connect('toggled', self._on_toggled, 4))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ6.connect('toggled', self._on_toggled, 5))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ7.connect('toggled', self._on_toggled, 6))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ8.connect('toggled', self._on_toggled, 7))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ9.connect('toggled', self._on_toggled, 8))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ10.connect('toggled', self._on_toggled, 9))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ11.connect('toggled', self._on_toggled, 10))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ12.connect('toggled', self._on_toggled, 11))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ13.connect('toggled', self._on_toggled, 12))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ14.connect('toggled', self._on_toggled, 13))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ15.connect('toggled', self._on_toggled, 14))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ16.connect('toggled', self._on_toggled, 15))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ17.connect('toggled', self._on_toggled, 16))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ18.connect('toggled', self._on_toggled, 17))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ19.connect('toggled', self._on_toggled, 18))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ20.connect('toggled', self._on_toggled, 19))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ21.connect('toggled', self._on_toggled, 20))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ22.connect('toggled', self._on_toggled, 21))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ23.connect('toggled', self._on_toggled, 22))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ24.connect('toggled', self._on_toggled, 23))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ25.connect('toggled', self._on_toggled, 24))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ26.connect('toggled', self._on_toggled, 25))\r\n 
self._lst_handler_id.append(\r\n self.chkDevEnvQ27.connect('toggled', self._on_toggled, 26))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ28.connect('toggled', self._on_toggled, 27))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ29.connect('toggled', self._on_toggled, 28))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ30.connect('toggled', self._on_toggled, 29))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ31.connect('toggled', self._on_toggled, 30))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ32.connect('toggled', self._on_toggled, 31))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ33.connect('toggled', self._on_toggled, 32))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ34.connect('toggled', self._on_toggled, 33))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ35.connect('toggled', self._on_toggled, 34))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ36.connect('toggled', self._on_toggled, 35))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ37.connect('toggled', self._on_toggled, 36))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ38.connect('toggled', self._on_toggled, 37))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ39.connect('toggled', self._on_toggled, 38))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ40.connect('toggled', self._on_toggled, 39))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ41.connect('toggled', self._on_toggled, 40))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ42.connect('toggled', self._on_toggled, 41))\r\n self._lst_handler_id.append(\r\n self.chkDevEnvQ43.connect('toggled', self._on_toggled, 42))\r\n\r\n def create_risk_analysis_page(self, notebook):\r\n \"\"\"\r\n Method to create the development environment risk analysis page and add\r\n it to the risk analysis gtk.Notebook().\r\n\r\n :param gtk.Notebook notebook: the gtk.Notebook() instance that will\r\n hold the development environment risk\r\n analysis questions.\r\n :return: False if successful or True if an error is encountered.\r\n :rtype: bool\r\n \"\"\"\r\n\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n # Build-up the containers for the tab. #\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n _hpaned = gtk.HPaned()\r\n self.pack1(_hpaned, resize=True, shrink=True)\r\n\r\n # Create the organizational risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Organization\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack1(_frame, True, True)\r\n\r\n _labels = [_(u\"1. There are separate design and coding \"\r\n u\"organizations.\"),\r\n _(u\"2. There is an independent software test \"\r\n u\"organization.\"),\r\n _(u\"3. There is an independent software quality \"\r\n u\"assurance organization.\"),\r\n _(u\"4. There is an independent software configuration \"\r\n u\"management organization.\"),\r\n _(u\"5. There is an independent software verification \"\r\n u\"and validation organization.\"),\r\n _(u\"6. A structured programming team will develop the \"\r\n u\"software.\"),\r\n _(u\"7. The educational level of the software team members \"\r\n u\"is above average.\"),\r\n _(u\"8. 
The experience level of the software team members \"\r\n u\"is above average.\")]\r\n (_x_pos,\r\n _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n _x_pos += 125\r\n\r\n _fixed.put(self.chkDevEnvQ1, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ2, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ3, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ4, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ5, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ6, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ7, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ8, _x_pos, _y_pos[7])\r\n\r\n # Create the methods risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Methods\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack2(_frame, True, True)\r\n\r\n _labels = [_(u\"1. Standards are defined and will be enforced.\"),\r\n _(u\"2. Software will be developed using a higher order \"\r\n u\"language.\"),\r\n _(u\"3. The development process will include formal \"\r\n u\"reviews (PDR, CDR, etc.).\"),\r\n _(u\"4. The development process will include frequent \"\r\n u\"walkthroughs.\"),\r\n _(u\"5. Development will take a top-down and \"\r\n u\"structured approach.\"),\r\n _(u\"6. Unit development folders will be used.\"),\r\n _(u\"7. A software development library will be used.\"),\r\n _(u\"8. A formal change and error reporting process \"\r\n u\"will be used.\"),\r\n _(u\"9. Progress and status will routinely be \"\r\n u\"reported.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ9, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ10, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ11, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ12, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ13, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ14, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ15, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ16, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ17, _x_pos, _y_pos[8])\r\n\r\n # Create the documentation risk pane.\r\n _hpaned = gtk.HPaned()\r\n self.pack2(_hpaned, resize=True, shrink=True)\r\n\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Documentation\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack1(_frame, True, True)\r\n\r\n _labels = [_(u\" 1. System requirements specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 2. Software requirements specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 3. Interface design specifications will be \"\r\n u\"documented.\"),\r\n _(u\" 4. Software design specification will be \"\r\n u\"documented.\"),\r\n _(u\" 5. Test plans, procedures, and reports will be \"\r\n u\"documented.\"),\r\n _(u\" 6. The software development plan will be \"\r\n u\"documented.\"),\r\n _(u\" 7. The software quality assurance plan will be \"\r\n u\"documented.\"),\r\n _(u\" 8. The software configuration management plan will \"\r\n u\"be documented.\"),\r\n _(u\" 9. A requirements traceability matrix will be \"\r\n u\"used.\"),\r\n _(u\"10. 
The software version description will be \"\r\n u\"documented.\"),\r\n _(u\"11. All software discrepancies will be \"\r\n u\"documented.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ18, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ19, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ20, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ21, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ22, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ23, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ24, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ25, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ26, _x_pos, _y_pos[8])\r\n _fixed.put(self.chkDevEnvQ27, _x_pos, _y_pos[9])\r\n _fixed.put(self.chkDevEnvQ28, _x_pos, _y_pos[10])\r\n\r\n # Create the tools and test techniques risk pane.\r\n _fixed = gtk.Fixed()\r\n\r\n _scrollwindow = gtk.ScrolledWindow()\r\n _scrollwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)\r\n _scrollwindow.add_with_viewport(_fixed)\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Tools & Test Techniques\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_OUT)\r\n _frame.add(_scrollwindow)\r\n\r\n _hpaned.pack2(_frame, True, True)\r\n\r\n _labels = [_(u\" 1. The software language requirements will be \"\r\n u\"specified.\"),\r\n _(u\" 2. Formal program design language will be used.\"),\r\n _(u\" 3. Program design graphical techniques \"\r\n u\"(flowcharts, HIPO, etc.) will be used.\"),\r\n _(u\" 4. Simulation/emulation tools will be used.\"),\r\n _(u\" 5. Configuration management tools will be used.\"),\r\n _(u\" 6. A code auditing tool will be used.\"),\r\n _(u\" 7. A data flow analyzer will be used.\"),\r\n _(u\" 8. A programmer's workbench will be used.\"),\r\n _(u\" 9. Measurement tools will be used.\"),\r\n _(u\"10. Software code reviews will be used.\"),\r\n _(u\"11. Software branch testing will be used.\"),\r\n _(u\"12. Random testing will be used.\"),\r\n _(u\"13. Functional testing will be used.\"),\r\n _(u\"14. Error and anomaly detection testing will be \"\r\n u\"used.\"),\r\n _(u\"15. 
Structure analysis will be used.\")]\r\n (__, _y_pos) = Widgets.make_labels(_labels, _fixed, 5, 5, wrap=False)\r\n\r\n _fixed.put(self.chkDevEnvQ29, _x_pos, _y_pos[0])\r\n _fixed.put(self.chkDevEnvQ30, _x_pos, _y_pos[1])\r\n _fixed.put(self.chkDevEnvQ31, _x_pos, _y_pos[2])\r\n _fixed.put(self.chkDevEnvQ32, _x_pos, _y_pos[3])\r\n _fixed.put(self.chkDevEnvQ33, _x_pos, _y_pos[4])\r\n _fixed.put(self.chkDevEnvQ34, _x_pos, _y_pos[5])\r\n _fixed.put(self.chkDevEnvQ35, _x_pos, _y_pos[6])\r\n _fixed.put(self.chkDevEnvQ36, _x_pos, _y_pos[7])\r\n _fixed.put(self.chkDevEnvQ37, _x_pos, _y_pos[8])\r\n _fixed.put(self.chkDevEnvQ38, _x_pos, _y_pos[9])\r\n _fixed.put(self.chkDevEnvQ39, _x_pos, _y_pos[10])\r\n _fixed.put(self.chkDevEnvQ40, _x_pos, _y_pos[11])\r\n _fixed.put(self.chkDevEnvQ41, _x_pos, _y_pos[12])\r\n _fixed.put(self.chkDevEnvQ42, _x_pos, _y_pos[13])\r\n _fixed.put(self.chkDevEnvQ43, _x_pos, _y_pos[14])\r\n\r\n _label = gtk.Label()\r\n _label.set_markup(\"<span weight='bold'>\" +\r\n _(u\"Development\\nEnvironment\") +\r\n \"</span>\")\r\n _label.set_alignment(xalign=0.5, yalign=0.5)\r\n _label.set_justify(gtk.JUSTIFY_CENTER)\r\n _label.set_angle(0)\r\n _label.show_all()\r\n _label.set_tooltip_text(_(u\"Assesses risk due to the development \"\r\n u\"environment.\"))\r\n notebook.insert_page(self, tab_label=_label, position=-1)\r\n\r\n return False\r\n\r\n def load(self, model):\r\n \"\"\"\r\n Method to load the Development Environment Risk Analysis answers.\r\n\r\n :param `rtk.software.Software` model: the Software data model to load\r\n the gtk.ToggleButton() from.\r\n :return: False if successful or True if an error is encountered.\r\n :rtype: bool\r\n \"\"\"\r\n\r\n self._software_model = model\r\n\r\n self.chkDevEnvQ1.set_active(model.lst_development[0])\r\n self.chkDevEnvQ2.set_active(model.lst_development[1])\r\n self.chkDevEnvQ3.set_active(model.lst_development[2])\r\n self.chkDevEnvQ4.set_active(model.lst_development[3])\r\n self.chkDevEnvQ5.set_active(model.lst_development[4])\r\n self.chkDevEnvQ6.set_active(model.lst_development[5])\r\n self.chkDevEnvQ7.set_active(model.lst_development[6])\r\n self.chkDevEnvQ8.set_active(model.lst_development[7])\r\n self.chkDevEnvQ9.set_active(model.lst_development[8])\r\n self.chkDevEnvQ10.set_active(model.lst_development[9])\r\n self.chkDevEnvQ11.set_active(model.lst_development[10])\r\n self.chkDevEnvQ12.set_active(model.lst_development[11])\r\n self.chkDevEnvQ13.set_active(model.lst_development[12])\r\n self.chkDevEnvQ14.set_active(model.lst_development[13])\r\n self.chkDevEnvQ15.set_active(model.lst_development[14])\r\n self.chkDevEnvQ16.set_active(model.lst_development[15])\r\n self.chkDevEnvQ17.set_active(model.lst_development[16])\r\n self.chkDevEnvQ18.set_active(model.lst_development[17])\r\n self.chkDevEnvQ19.set_active(model.lst_development[18])\r\n self.chkDevEnvQ20.set_active(model.lst_development[19])\r\n self.chkDevEnvQ21.set_active(model.lst_development[20])\r\n self.chkDevEnvQ22.set_active(model.lst_development[21])\r\n self.chkDevEnvQ23.set_active(model.lst_development[22])\r\n self.chkDevEnvQ24.set_active(model.lst_development[23])\r\n self.chkDevEnvQ25.set_active(model.lst_development[24])\r\n self.chkDevEnvQ26.set_active(model.lst_development[25])\r\n self.chkDevEnvQ27.set_active(model.lst_development[26])\r\n self.chkDevEnvQ28.set_active(model.lst_development[27])\r\n self.chkDevEnvQ29.set_active(model.lst_development[28])\r\n self.chkDevEnvQ30.set_active(model.lst_development[29])\r\n 
self.chkDevEnvQ31.set_active(model.lst_development[30])\r\n self.chkDevEnvQ32.set_active(model.lst_development[31])\r\n self.chkDevEnvQ33.set_active(model.lst_development[32])\r\n self.chkDevEnvQ34.set_active(model.lst_development[33])\r\n self.chkDevEnvQ35.set_active(model.lst_development[34])\r\n self.chkDevEnvQ36.set_active(model.lst_development[35])\r\n self.chkDevEnvQ37.set_active(model.lst_development[36])\r\n self.chkDevEnvQ38.set_active(model.lst_development[37])\r\n self.chkDevEnvQ39.set_active(model.lst_development[38])\r\n self.chkDevEnvQ40.set_active(model.lst_development[39])\r\n self.chkDevEnvQ41.set_active(model.lst_development[40])\r\n self.chkDevEnvQ42.set_active(model.lst_development[41])\r\n self.chkDevEnvQ43.set_active(model.lst_development[42])\r\n\r\n return False\r\n\r\n def _on_toggled(self, check, index):\r\n \"\"\"\r\n Callback method for gtk.CheckButton() 'toggled' event.\r\n\r\n :param gtk.CheckButton check: the gtk.CheckButton() that called this\r\n method.\r\n :param int index: the index of the Development Environment question\r\n associated with the gtk.CheckButton() that was\r\n toggled.\r\n :return: False if successful or True if an error is encountered.\r\n :rtype: bool\r\n \"\"\"\r\n\r\n check.handler_block(self._lst_handler_id[index])\r\n\r\n self._software_model.lst_development[index] = int(check.get_active())\r\n\r\n check.handler_unblock(self._lst_handler_id[index])\r\n\r\n return False\r\n",
"step-ids": [
4,
5,
7,
9,
10
]
}
|
[
4,
5,
7,
9,
10
] |
'''
* @file IntQueue.py
* @author (original JAVA) William Fiset, [email protected]
* liujingkun, [email protected]
* (conversion to Python) Armin Zare Zadeh, [email protected]
* @date 23 Jun 2020
* @version 0.1
* @brief This file contains an implementation of an integer only queue.
*
'''
import time
from array import array as arr
from collections import deque
from Queue import Queue
class IntQueue(Queue):
'''
An integer only implementation of a queue
'''
def __init__(self, maxSize):
"""
maxSize is the maximum number of items
that can be in the queue at any given time
"""
self.front = 0
self.end = 0
self.qSize = 0
self.data = arr('i', (0 for i in range(maxSize)))
def isEmpty(self):
"""
Return true/false on whether the queue is empty
"""
return self.qSize == 0
def size(self):
"""
Return the number of elements inside the queue
"""
return self.qSize
def peek(self):
if self.isEmpty():
raise Exception('Queue is empty')
self.front = self.front % len(self.data)
return self.data[self.front]
def isFull(self):
return self.qSize == len(self.data)
def offer(self, value):
"""
Add an element to the queue
"""
if self.isFull():
raise Exception("Queue too small!")
self.data[self.end] = value
self.end += 1
self.qSize += 1
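        # wrap the write index back to the start of the backing array when it runs past the end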
self.end = self.end % len(self.data)
def poll(self):
"""
        Make sure you check that the queue is not empty before calling poll!
"""
if self.isEmpty():
raise Exception('Queue is empty')
self.qSize -= 1
self.front = self.front % len(self.data)
d = self.data[self.front]
self.front += 1
return d
def benchMarkTest():
"""
BenchMark IntQueue vs ArrayDeque.
"""
n = 10000000
intQ = IntQueue(n)
# IntQueue times at around 12.109375 seconds
start = time.process_time()
for i in range(0, n):
intQ.offer(i)
for i in range(0, n):
intQ.poll()
end = time.process_time()
print("IntQueue Time: ", (end - start))
# ArrayDeque times at around 1.1875 seconds
arrayDeque = deque()
start = time.process_time()
for i in range(0, n):
arrayDeque.append(i)
for i in range(0, n):
arrayDeque.popleft()
end = time.process_time()
print("ArrayDeque Time: ", (end - start))
if __name__ == '__main__':
"""
Example usage
"""
q = IntQueue(5)
q.offer(1)
q.offer(2)
q.offer(3)
q.offer(4)
q.offer(5)
print(q.poll()) # 1
print(q.poll()) # 2
print(q.poll()) # 3
print(q.poll()) # 4
print(q.isEmpty()) # false
    q.offer(1)
    q.offer(2)
    q.offer(3)
print(q.poll()) # 5
print(q.poll()) # 1
print(q.poll()) # 2
print(q.poll()) # 3
print(q.isEmpty()) # true
benchMarkTest()
|
normal
|
{
"blob_id": "0ed99037d7ff708b7931fbc3553b1aeb19a20f53",
"index": 810,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass IntQueue(Queue):\n <mask token>\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\"\n n = 10000000\n intQ = IntQueue(n)\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print('IntQueue Time: ', end - start)\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print('ArrayDeque Time: ', end - start)\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n q = IntQueue(5)\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n q.offer(1)\n q.offer(2)\n q.offer(3)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n benchMarkTest()\n",
"step-4": "<mask token>\nimport time\nfrom array import array as arr\nfrom collections import deque\nfrom Queue import Queue\n\n\nclass IntQueue(Queue):\n \"\"\" \n An integer only implementation of a queue\n \"\"\"\n\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\"\n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\"\n return self.qSize\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception('Queue too small!')\n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\"\n n = 10000000\n intQ = IntQueue(n)\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print('IntQueue Time: ', end - start)\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print('ArrayDeque Time: ', end - start)\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n q = IntQueue(5)\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n q.offer(1)\n q.offer(2)\n q.offer(3)\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.poll())\n print(q.isEmpty())\n benchMarkTest()\n",
"step-5": "'''\n * @file IntQueue.py\n * @author (original JAVA) William Fiset, [email protected]\n * liujingkun, [email protected]\n * (conversion to Python) Armin Zare Zadeh, [email protected]\n * @date 23 Jun 2020\n * @version 0.1\n * @brief This file contains an implementation of an integer only queue.\n * \n'''\n\nimport time\nfrom array import array as arr\nfrom collections import deque\nfrom Queue import Queue\n\n\nclass IntQueue(Queue):\n ''' \n An integer only implementation of a queue\n '''\n def __init__(self, maxSize):\n \"\"\"\n maxSize is the maximum number of items\n that can be in the queue at any given time\n \"\"\" \n self.front = 0\n self.end = 0\n self.qSize = 0\n self.data = arr('i', (0 for i in range(maxSize)))\n\n\n def isEmpty(self):\n \"\"\"\n Return true/false on whether the queue is empty\n \"\"\"\n return self.qSize == 0\n\n\n def size(self):\n \"\"\"\n Return the number of elements inside the queue\n \"\"\" \n return self.qSize\n\n\n def peek(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n \n self.front = self.front % len(self.data)\n return self.data[self.front]\n\n\n def isFull(self):\n return self.qSize == len(self.data)\n\n\n def offer(self, value):\n \"\"\"\n Add an element to the queue\n \"\"\"\n if self.isFull():\n raise Exception(\"Queue too small!\")\n \n self.data[self.end] = value\n self.end += 1\n self.qSize += 1\n self.end = self.end % len(self.data)\n\n\n def poll(self):\n \"\"\"\n Make sure you check is the queue is not empty before calling poll!\n \"\"\"\n if self.isEmpty():\n raise Exception('Queue is empty')\n \n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d\n\n\n\ndef benchMarkTest():\n \"\"\"\n BenchMark IntQueue vs ArrayDeque.\n \"\"\" \n\n n = 10000000\n intQ = IntQueue(n)\n\n # IntQueue times at around 12.109375 seconds\n start = time.process_time()\n for i in range(0, n):\n intQ.offer(i)\n for i in range(0, n):\n intQ.poll()\n end = time.process_time()\n print(\"IntQueue Time: \", (end - start))\n\n # ArrayDeque times at around 1.1875 seconds\n arrayDeque = deque()\n start = time.process_time()\n for i in range(0, n):\n arrayDeque.append(i)\n for i in range(0, n):\n arrayDeque.popleft()\n end = time.process_time()\n print(\"ArrayDeque Time: \", (end - start))\n\n\n\nif __name__ == '__main__':\n \"\"\"\n Example usage\n \"\"\"\n\n q = IntQueue(5)\n\n q.offer(1)\n q.offer(2)\n q.offer(3)\n q.offer(4)\n q.offer(5)\n\n print(q.poll()) # 1\n print(q.poll()) # 2\n print(q.poll()) # 3\n print(q.poll()) # 4\n\n print(q.isEmpty()) # false\n\n q.offer(1);\n q.offer(2);\n q.offer(3);\n\n print(q.poll()) # 5\n print(q.poll()) # 1\n print(q.poll()) # 2\n print(q.poll()) # 3\n\n print(q.isEmpty()) # true\n\n benchMarkTest()\n",
"step-ids": [
0,
8,
11,
12,
13
]
}
|
[
0,
8,
11,
12,
13
] |
from random import randint
import matplotlib.pyplot as plt
def generate_list(length: int) -> list:
"""Generate a list with given length with random integer values in the interval [0, length]
Args:
length (int): List length
Returns:
list: List generated with random values
"""
return [randint(0, length + 1) for _ in range(length)]
def plot_table(timestamps: dict, threadList: list, mList: list) -> None:
"""Plot standard deviation chart
Args:
k (list): Threads/Process used
deviation (list): Standard deviation of the timestamps
label (str): "Threads" or "Processos"
"""
plt.plot(threadList, timestamps.values(), 'o-')
plt.legend(mList, title = 'Total valores', loc='best', bbox_to_anchor=(0.5, 0., 0.5, 0.5))
plt.xlabel('Número de processos')
plt.ylabel('Tempo de Execução (s)')
plt.title('Tempo de Execução por Total de Processos e Valores')
plt.show()
|
normal
|
{
"blob_id": "8804bfc5bed8b93e50279f0cbab561fe09d92a64",
"index": 6522,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef plot_table(timestamps: dict, threadList: list, mList: list) ->None:\n \"\"\"Plot standard deviation chart\n\n Args:\n k (list): Threads/Process used\n deviation (list): Standard deviation of the timestamps\n label (str): \"Threads\" or \"Processos\"\n \"\"\"\n plt.plot(threadList, timestamps.values(), 'o-')\n plt.legend(mList, title='Total valores', loc='best', bbox_to_anchor=(\n 0.5, 0.0, 0.5, 0.5))\n plt.xlabel('Número de processos')\n plt.ylabel('Tempo de Execução (s)')\n plt.title('Tempo de Execução por Total de Processos e Valores')\n plt.show()\n",
"step-3": "<mask token>\n\n\ndef generate_list(length: int) ->list:\n \"\"\"Generate a list with given length with random integer values in the interval [0, length]\n\n Args:\n length (int): List length\n\n Returns:\n list: List generated with random values\n \"\"\"\n return [randint(0, length + 1) for _ in range(length)]\n\n\ndef plot_table(timestamps: dict, threadList: list, mList: list) ->None:\n \"\"\"Plot standard deviation chart\n\n Args:\n k (list): Threads/Process used\n deviation (list): Standard deviation of the timestamps\n label (str): \"Threads\" or \"Processos\"\n \"\"\"\n plt.plot(threadList, timestamps.values(), 'o-')\n plt.legend(mList, title='Total valores', loc='best', bbox_to_anchor=(\n 0.5, 0.0, 0.5, 0.5))\n plt.xlabel('Número de processos')\n plt.ylabel('Tempo de Execução (s)')\n plt.title('Tempo de Execução por Total de Processos e Valores')\n plt.show()\n",
"step-4": "from random import randint\nimport matplotlib.pyplot as plt\n\n\ndef generate_list(length: int) ->list:\n \"\"\"Generate a list with given length with random integer values in the interval [0, length]\n\n Args:\n length (int): List length\n\n Returns:\n list: List generated with random values\n \"\"\"\n return [randint(0, length + 1) for _ in range(length)]\n\n\ndef plot_table(timestamps: dict, threadList: list, mList: list) ->None:\n \"\"\"Plot standard deviation chart\n\n Args:\n k (list): Threads/Process used\n deviation (list): Standard deviation of the timestamps\n label (str): \"Threads\" or \"Processos\"\n \"\"\"\n plt.plot(threadList, timestamps.values(), 'o-')\n plt.legend(mList, title='Total valores', loc='best', bbox_to_anchor=(\n 0.5, 0.0, 0.5, 0.5))\n plt.xlabel('Número de processos')\n plt.ylabel('Tempo de Execução (s)')\n plt.title('Tempo de Execução por Total de Processos e Valores')\n plt.show()\n",
"step-5": "from random import randint\nimport matplotlib.pyplot as plt\n\ndef generate_list(length: int) -> list:\n \"\"\"Generate a list with given length with random integer values in the interval [0, length]\n\n Args:\n length (int): List length\n\n Returns:\n list: List generated with random values\n \"\"\"\n\n return [randint(0, length + 1) for _ in range(length)]\n\ndef plot_table(timestamps: dict, threadList: list, mList: list) -> None:\n \"\"\"Plot standard deviation chart\n\n Args:\n k (list): Threads/Process used\n deviation (list): Standard deviation of the timestamps\n label (str): \"Threads\" or \"Processos\"\n \"\"\"\n plt.plot(threadList, timestamps.values(), 'o-')\n plt.legend(mList, title = 'Total valores', loc='best', bbox_to_anchor=(0.5, 0., 0.5, 0.5))\n plt.xlabel('Número de processos')\n plt.ylabel('Tempo de Execução (s)')\n plt.title('Tempo de Execução por Total de Processos e Valores')\n plt.show()\n ",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
N = int(input())
A_list = list(map(int, input().split()))
B_list = list(map(int, input().split()))
C_list = list(map(int, input().split()))
ans = 0
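# For each element A[i], add B[A[i] - 1]; when A[i] and A[i + 1] are consecutive
# integers, also add the bonus C[A[i] - 1].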
for i in range(N):
ans += B_list[A_list[i] - 1]
if i < N - 1:
if A_list[i] + 1 == A_list[i + 1]:
ans += C_list[A_list[i] - 1]
print(ans)
|
normal
|
{
"blob_id": "cc160b1b0478446ba0daec4a0fe9e63453df3d96",
"index": 5029,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(N):\n ans += B_list[A_list[i] - 1]\n if i < N - 1:\n if A_list[i] + 1 == A_list[i + 1]:\n ans += C_list[A_list[i] - 1]\nprint(ans)\n",
"step-3": "N = int(input())\nA_list = list(map(int, input().split()))\nB_list = list(map(int, input().split()))\nC_list = list(map(int, input().split()))\nans = 0\nfor i in range(N):\n ans += B_list[A_list[i] - 1]\n if i < N - 1:\n if A_list[i] + 1 == A_list[i + 1]:\n ans += C_list[A_list[i] - 1]\nprint(ans)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import tensorflow as tf
import settings
import numpy as np
slim = tf.contrib.slim
class Model:
def __init__(self, training = True):
self.classes = settings.classes_name
self.num_classes = len(settings.classes_name)
self.image_size = settings.image_size
self.cell_size = settings.cell_size
self.boxes_per_cell = settings.box_per_cell
self.output_size = (self.cell_size * self.cell_size) * (self.num_classes + self.boxes_per_cell * 5)
self.scale = 1.0 * self.image_size / self.cell_size
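        # The flat prediction vector per image is laid out as
        # [class probabilities | per-box confidences | per-box coordinates];
        # boundary1 and boundary2 mark where those slices are split in loss_layer.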
self.boundary1 = self.cell_size * self.cell_size * self.num_classes
self.boundary2 = self.boundary1 + self.cell_size * self.cell_size * self.boxes_per_cell
self.object_scale = settings.object_scale
self.no_object_scale = settings.no_object_scale
self.class_scale = settings.class_scale
self.coord_scale = settings.coordinate_scale
self.offset = np.transpose(np.reshape(np.array([np.arange(self.cell_size)] * self.cell_size * self.boxes_per_cell), (self.boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))
self.images = tf.placeholder(tf.float32, [None, settings.image_size, settings.image_size, 3])
if settings.model_type == 'normal':
self.logits = self.build_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)
if settings.model_type == 'fast':
self.logits = self.build_fast_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)
if training:
self.batch = tf.Variable(0)
self.labels = tf.placeholder(tf.float32, [None, self.cell_size, self.cell_size, 5 + self.num_classes])
self.loss_layer(self.logits, self.labels)
self.total_loss = tf.contrib.losses.get_total_loss()
self.learning_rate = tf.train.exponential_decay(settings.learning_rate, self.batch * settings.batch_size, settings.decay_step, settings.decay_rate, True)
self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate).minimize(self.total_loss, global_step = self.batch)
def build_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):
with tf.variable_scope(scope):
with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):
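                # 24 convolutional layers followed by fully connected layers, mirroring the
                # YOLOv1 detection network; the shared arg_scope applies leaky ReLU,
                # truncated-normal initialization and L2 weight decay to every layer.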
net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')
net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')
net = slim.conv2d(net, 192, 3, scope = 'conv_4')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')
net = slim.conv2d(net, 128, 1, scope = 'conv_6')
net = slim.conv2d(net, 256, 3, scope = 'conv_7')
net = slim.conv2d(net, 256, 1, scope = 'conv_8')
net = slim.conv2d(net, 512, 3, scope = 'conv_9')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')
net = slim.conv2d(net, 256, 1, scope = 'conv_11')
net = slim.conv2d(net, 512, 3, scope = 'conv_12')
net = slim.conv2d(net, 256, 1, scope = 'conv_13')
net = slim.conv2d(net, 512, 3, scope = 'conv_14')
net = slim.conv2d(net, 256, 1, scope = 'conv_15')
net = slim.conv2d(net, 512, 3, scope = 'conv_16')
net = slim.conv2d(net, 256, 1, scope = 'conv_17')
net = slim.conv2d(net, 512, 3, scope = 'conv_18')
net = slim.conv2d(net, 512, 1, scope = 'conv_19')
net = slim.conv2d(net, 1024, 3, scope = 'conv_20')
net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')
net = slim.conv2d(net, 512, 1, scope = 'conv_22')
net = slim.conv2d(net, 1024, 3, scope = 'conv_23')
net = slim.conv2d(net, 512, 1, scope = 'conv_24')
net = slim.conv2d(net, 1024, 3, scope = 'conv_25')
net = slim.conv2d(net, 1024, 3, scope = 'conv_26')
net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')
net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')
net = slim.conv2d(net, 1024, 3, scope = 'conv_29')
net = slim.conv2d(net, 1024, 3, scope = 'conv_30')
net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')
net = slim.flatten(net, scope = 'flat_32')
net = slim.fully_connected(net, 512, scope = 'fc_33')
net = slim.fully_connected(net, 4096, scope = 'fc_34')
net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')
net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')
return net
def build_fast_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):
with tf.variable_scope(scope):
with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):
net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')
net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')
net = slim.conv2d(net, 192, 3, scope = 'conv_4')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')
net = slim.conv2d(net, 128, 1, scope = 'conv_6')
net = slim.conv2d(net, 256, 3, scope = 'conv_7')
net = slim.conv2d(net, 512, 3, scope = 'conv_9')
net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')
net = slim.conv2d(net, 256, 1, scope = 'conv_11')
net = slim.conv2d(net, 512, 3, scope = 'conv_12')
net = slim.conv2d(net, 1024, 3, scope = 'conv_20')
net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')
net = slim.conv2d(net, 512, 1, scope = 'conv_22')
net = slim.conv2d(net, 1024, 3, scope = 'conv_23')
net = slim.conv2d(net, 1024, 3, scope = 'conv_26')
net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')
net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')
net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')
net = slim.flatten(net, scope = 'flat_32')
net = slim.fully_connected(net, 512, scope = 'fc_33')
net = slim.fully_connected(net, 4096, scope = 'fc_34')
net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')
net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')
return net
def calc_iou(self, boxes1, boxes2, scope = 'iou'):
with tf.variable_scope(scope):
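            # Boxes arrive as (x_center, y_center, w, h); convert to corner form
            # (x_min, y_min, x_max, y_max) so the overlap can be computed per axis.
            # Worked example: boxes (0.5, 0.5, 1, 1) and (1.0, 0.5, 1, 1) overlap on a
            # 0.5 x 1 strip, so IoU = 0.5 / (1 + 1 - 0.5) = 1/3.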
boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2] / 2.0,
boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / 2.0,
boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0,
boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])
boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])
boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2] / 2.0,
boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / 2.0,
boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0,
boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])
boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])
lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])
rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])
intersection = tf.maximum(0.0, rd - lu)
inter_square = intersection[:, :, :, :, 0] * intersection[:, :, :, :, 1]
square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1[:, :, :, :, 3] - boxes1[:, :, :, :, 1])
square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2[:, :, :, :, 3] - boxes2[:, :, :, :, 1])
union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)
return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)
def loss_layer(self, predicts, labels, scope = 'loss_layer'):
with tf.variable_scope(scope):
predict_classes = tf.reshape(predicts[:, :self.boundary1], [settings.batch_size, self.cell_size, self.cell_size, self.num_classes])
predict_scales = tf.reshape(predicts[:, self.boundary1:self.boundary2], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell])
predict_boxes = tf.reshape(predicts[:, self.boundary2:], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell, 4])
response = tf.reshape(labels[:, :, :, 0], [settings.batch_size, self.cell_size, self.cell_size, 1])
boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size, self.cell_size, self.cell_size, 1, 4])
boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]) / self.image_size
classes = labels[:, :, :, 5:]
offset = tf.constant(self.offset, dtype = tf.float32)
offset = tf.reshape(offset, [1, self.cell_size, self.cell_size, self.boxes_per_cell])
offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])
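            # Predicted x/y are offsets within a grid cell and w/h are square roots;
            # add the cell offsets and square the sizes to get boxes in image-relative
            # [0, 1] coordinates before computing IoU against the ground truth.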
predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] + offset) / self.cell_size,
(predict_boxes[:, :, :, :, 1] + tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size,
tf.square(predict_boxes[:, :, :, :, 2]),
tf.square(predict_boxes[:, :, :, :, 3])])
predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3, 4, 0])
iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)
object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)
object_mask = tf.cast((iou_predict_truth >= object_mask), tf.float32) * response
noobject_mask = tf.ones_like(object_mask, dtype=tf.float32) - object_mask
boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size - offset,
boxes[:, :, :, :, 1] * self.cell_size - tf.transpose(offset, (0, 2, 1, 3)),
tf.sqrt(boxes[:, :, :, :, 2]),
tf.sqrt(boxes[:, :, :, :, 3])])
boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])
class_delta = response * (predict_classes - classes)
class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta), axis=[1, 2, 3]), name = 'class_loss') * self.class_scale
object_delta = object_mask * (predict_scales - iou_predict_truth)
object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(object_delta), axis=[1, 2, 3]), name = 'object_loss') * self.object_scale
noobject_delta = noobject_mask * predict_scales
noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(noobject_delta), axis=[1, 2, 3]), name = 'noobject_loss') * self.no_object_scale
coord_mask = tf.expand_dims(object_mask, 4)
boxes_delta = coord_mask * (predict_boxes - boxes_tran)
coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta), axis=[1, 2, 3, 4]), name = 'coord_loss') * self.coord_scale
tf.contrib.losses.add_loss(class_loss)
tf.contrib.losses.add_loss(object_loss)
tf.contrib.losses.add_loss(noobject_loss)
tf.contrib.losses.add_loss(coord_loss)
def leaky_relu(alpha):
def op(inputs):
return tf.maximum(alpha * inputs, inputs)
return op
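
# Minimal usage sketch (assumes a populated settings module and a TensorFlow 1.x
# session; not part of the original file):
#   model = Model(training=False)
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       predictions = sess.run(model.logits, feed_dict={model.images: image_batch})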
|
normal
|
{
"blob_id": "8ccec24e1a7060269ffbb376ba0c480da9eabe0a",
"index": 819,
"step-1": "<mask token>\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = 
slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = 
intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = 
slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = 
intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"step-3": "<mask token>\nslim = tf.contrib.slim\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, 
scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n intersection = tf.maximum(0.0, rd - 
lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"step-4": "import tensorflow as tf\nimport settings\nimport numpy as np\nslim = tf.contrib.slim\n\n\nclass Model:\n\n def __init__(self, training=True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = self.cell_size * self.cell_size * (self.\n num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = (self.boundary1 + self.cell_size * self.cell_size *\n self.boxes_per_cell)\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n self.offset = np.transpose(np.reshape(np.array([np.arange(self.\n cell_size)] * self.cell_size * self.boxes_per_cell), (self.\n boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n self.images = tf.placeholder(tf.float32, [None, settings.image_size,\n settings.image_size, 3])\n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs=self.\n output_size, alpha=settings.alpha_relu, training=training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs=\n self.output_size, alpha=settings.alpha_relu, training=training)\n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size,\n self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.\n learning_rate, self.batch * settings.batch_size, settings.\n decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.\n learning_rate).minimize(self.total_loss, global_step=self.batch\n )\n\n def build_network(self, images, num_outputs, alpha, keep_prob=settings.\n dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 256, 1, scope='conv_8')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 256, 1, scope='conv_13')\n net = slim.conv2d(net, 512, 3, scope='conv_14')\n net = slim.conv2d(net, 256, 1, scope='conv_15')\n net = slim.conv2d(net, 512, 3, scope='conv_16')\n net = slim.conv2d(net, 256, 1, scope='conv_17')\n net = slim.conv2d(net, 512, 3, scope='conv_18')\n net = slim.conv2d(net, 512, 1, scope='conv_19')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', 
scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 512, 1, scope='conv_24')\n net = slim.conv2d(net, 1024, 3, scope='conv_25')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope='conv_29')\n net = slim.conv2d(net, 1024, 3, scope='conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def build_fast_network(self, images, num_outputs, alpha, keep_prob=\n settings.dropout, training=True, scope='yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected],\n activation_fn=leaky_relu(alpha), weights_initializer=tf.\n truncated_normal_initializer(0.0, 0.01),\n weights_regularizer=slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, \n 0]]), name='pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding='VALID', scope=\n 'conv_2')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_3')\n net = slim.conv2d(net, 192, 3, scope='conv_4')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_5')\n net = slim.conv2d(net, 128, 1, scope='conv_6')\n net = slim.conv2d(net, 256, 3, scope='conv_7')\n net = slim.conv2d(net, 512, 3, scope='conv_9')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_10')\n net = slim.conv2d(net, 256, 1, scope='conv_11')\n net = slim.conv2d(net, 512, 3, scope='conv_12')\n net = slim.conv2d(net, 1024, 3, scope='conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope='pool_21')\n net = slim.conv2d(net, 512, 1, scope='conv_22')\n net = slim.conv2d(net, 1024, 3, scope='conv_23')\n net = slim.conv2d(net, 1024, 3, scope='conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]\n ), name='pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope=\n 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope='flat_32')\n net = slim.fully_connected(net, 512, scope='fc_33')\n net = slim.fully_connected(net, 4096, scope='fc_34')\n net = slim.dropout(net, keep_prob=keep_prob, is_training=\n training, scope='dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn=\n None, scope='fc_36')\n return net\n\n def calc_iou(self, boxes1, boxes2, scope='iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2\n ] / 2.0, boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / \n 2.0, boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0, \n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2\n ] / 2.0, boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / \n 2.0, boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0, \n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n lu = tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, 
:, :, 2:])\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :,\n :, :, 1]\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1\n [:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2\n [:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope='loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.\n boundary2], [settings.batch_size, self.cell_size, self.\n cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [\n settings.batch_size, self.cell_size, self.cell_size, self.\n boxes_per_cell, 4])\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size,\n self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size,\n self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]\n ) / self.image_size\n classes = labels[:, :, :, 5:]\n offset = tf.constant(self.offset, dtype=tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size,\n self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] +\n offset) / self.cell_size, (predict_boxes[:, :, :, :, 1] +\n tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size, tf.\n square(predict_boxes[:, :, :, :, 2]), tf.square(\n predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3,\n 4, 0])\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast(iou_predict_truth >= object_mask, tf.float32\n ) * response\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32\n ) - object_mask\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size -\n offset, boxes[:, :, :, :, 1] * self.cell_size - tf.\n transpose(offset, (0, 2, 1, 3)), tf.sqrt(boxes[:, :, :, :, \n 2]), tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta\n ), axis=[1, 2, 3]), name='class_loss') * self.class_scale\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n object_delta), axis=[1, 2, 3]), name='object_loss'\n ) * self.object_scale\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(\n noobject_delta), axis=[1, 2, 3]), name='noobject_loss'\n ) * self.no_object_scale\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta\n ), axis=[1, 2, 3, 4]), name='coord_loss') * self.coord_scale\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\n\ndef leaky_relu(alpha):\n\n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"step-5": "import tensorflow as tf\nimport settings\nimport numpy as np\n\nslim = tf.contrib.slim\n\nclass Model:\n \n def __init__(self, training = True):\n self.classes = settings.classes_name\n self.num_classes = len(settings.classes_name)\n self.image_size = settings.image_size\n self.cell_size = settings.cell_size\n self.boxes_per_cell = settings.box_per_cell\n self.output_size = (self.cell_size * self.cell_size) * (self.num_classes + self.boxes_per_cell * 5)\n self.scale = 1.0 * self.image_size / self.cell_size\n self.boundary1 = self.cell_size * self.cell_size * self.num_classes\n self.boundary2 = self.boundary1 + self.cell_size * self.cell_size * self.boxes_per_cell\n\n self.object_scale = settings.object_scale\n self.no_object_scale = settings.no_object_scale\n self.class_scale = settings.class_scale\n self.coord_scale = settings.coordinate_scale\n \n self.offset = np.transpose(np.reshape(np.array([np.arange(self.cell_size)] * self.cell_size * self.boxes_per_cell), (self.boxes_per_cell, self.cell_size, self.cell_size)), (1, 2, 0))\n\n self.images = tf.placeholder(tf.float32, [None, settings.image_size, settings.image_size, 3])\n \n if settings.model_type == 'normal':\n self.logits = self.build_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)\n if settings.model_type == 'fast':\n self.logits = self.build_fast_network(self.images, num_outputs = self.output_size, alpha = settings.alpha_relu, training = training)\n \n if training:\n self.batch = tf.Variable(0)\n self.labels = tf.placeholder(tf.float32, [None, self.cell_size, self.cell_size, 5 + self.num_classes])\n self.loss_layer(self.logits, self.labels)\n self.total_loss = tf.contrib.losses.get_total_loss()\n self.learning_rate = tf.train.exponential_decay(settings.learning_rate, self.batch * settings.batch_size, settings.decay_step, settings.decay_rate, True)\n self.optimizer = tf.train.GradientDescentOptimizer(self.learning_rate).minimize(self.total_loss, global_step = self.batch)\n \n def build_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')\n net = slim.conv2d(net, 192, 3, scope = 'conv_4')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')\n net = slim.conv2d(net, 128, 1, scope = 'conv_6')\n net = slim.conv2d(net, 256, 3, scope = 'conv_7')\n net = slim.conv2d(net, 256, 1, scope = 'conv_8')\n net = slim.conv2d(net, 512, 3, scope = 'conv_9')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')\n net = slim.conv2d(net, 256, 1, scope = 'conv_11')\n net = slim.conv2d(net, 512, 3, scope = 'conv_12')\n net = slim.conv2d(net, 256, 1, scope = 'conv_13')\n net = slim.conv2d(net, 512, 3, scope = 'conv_14')\n net = slim.conv2d(net, 256, 1, scope = 'conv_15')\n net = slim.conv2d(net, 512, 3, scope = 'conv_16')\n net = slim.conv2d(net, 256, 1, scope = 'conv_17')\n net = slim.conv2d(net, 512, 3, scope = 'conv_18')\n net = slim.conv2d(net, 512, 1, scope = 'conv_19')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_20')\n net = 
slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')\n net = slim.conv2d(net, 512, 1, scope = 'conv_22')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_23')\n net = slim.conv2d(net, 512, 1, scope = 'conv_24')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_25')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_29')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_30')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope = 'flat_32')\n net = slim.fully_connected(net, 512, scope = 'fc_33')\n net = slim.fully_connected(net, 4096, scope = 'fc_34')\n net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')\n return net\n \n def build_fast_network(self, images, num_outputs, alpha, keep_prob = settings.dropout, training = True, scope = 'yolo'):\n with tf.variable_scope(scope):\n with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn = leaky_relu(alpha), weights_initializer = tf.truncated_normal_initializer(0.0, 0.01), weights_regularizer = slim.l2_regularizer(0.0005)):\n net = tf.pad(images, np.array([[0, 0], [3, 3], [3, 3], [0, 0]]), name = 'pad_1')\n net = slim.conv2d(net, 64, 7, 2, padding = 'VALID', scope = 'conv_2')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_3')\n net = slim.conv2d(net, 192, 3, scope = 'conv_4')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_5')\n net = slim.conv2d(net, 128, 1, scope = 'conv_6')\n net = slim.conv2d(net, 256, 3, scope = 'conv_7')\n net = slim.conv2d(net, 512, 3, scope = 'conv_9')\n net = slim.max_pool2d(net, 2, padding = 'SAME', scope = 'pool_10')\n net = slim.conv2d(net, 256, 1, scope = 'conv_11')\n net = slim.conv2d(net, 512, 3, scope = 'conv_12')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_20')\n net = slim.max_pool2d(net, 2, padding='SAME', scope = 'pool_21')\n net = slim.conv2d(net, 512, 1, scope = 'conv_22')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_23')\n net = slim.conv2d(net, 1024, 3, scope = 'conv_26')\n net = tf.pad(net, np.array([[0, 0], [1, 1], [1, 1], [0, 0]]), name = 'pad_27')\n net = slim.conv2d(net, 1024, 3, 2, padding='VALID', scope = 'conv_28')\n net = tf.transpose(net, [0, 3, 1, 2], name='trans_31')\n net = slim.flatten(net, scope = 'flat_32')\n net = slim.fully_connected(net, 512, scope = 'fc_33')\n net = slim.fully_connected(net, 4096, scope = 'fc_34')\n net = slim.dropout(net, keep_prob = keep_prob, is_training = training, scope = 'dropout_35')\n net = slim.fully_connected(net, num_outputs, activation_fn = None, scope = 'fc_36')\n return net\n \n \n def calc_iou(self, boxes1, boxes2, scope = 'iou'):\n with tf.variable_scope(scope):\n boxes1 = tf.stack([boxes1[:, :, :, :, 0] - boxes1[:, :, :, :, 2] / 2.0,\n boxes1[:, :, :, :, 1] - boxes1[:, :, :, :, 3] / 2.0,\n boxes1[:, :, :, :, 0] + boxes1[:, :, :, :, 2] / 2.0,\n boxes1[:, :, :, :, 1] + boxes1[:, :, :, :, 3] / 2.0])\n boxes1 = tf.transpose(boxes1, [1, 2, 3, 4, 0])\n\n boxes2 = tf.stack([boxes2[:, :, :, :, 0] - boxes2[:, :, :, :, 2] / 2.0,\n boxes2[:, :, :, :, 1] - boxes2[:, :, :, :, 3] / 2.0,\n boxes2[:, :, :, :, 0] + boxes2[:, :, :, :, 2] / 2.0,\n boxes2[:, :, :, :, 1] + boxes2[:, :, :, :, 3] / 2.0])\n boxes2 = tf.transpose(boxes2, [1, 2, 3, 4, 0])\n\n lu = 
tf.maximum(boxes1[:, :, :, :, :2], boxes2[:, :, :, :, :2])\n rd = tf.minimum(boxes1[:, :, :, :, 2:], boxes2[:, :, :, :, 2:])\n\n intersection = tf.maximum(0.0, rd - lu)\n inter_square = intersection[:, :, :, :, 0] * intersection[:, :, :, :, 1]\n\n square1 = (boxes1[:, :, :, :, 2] - boxes1[:, :, :, :, 0]) * (boxes1[:, :, :, :, 3] - boxes1[:, :, :, :, 1])\n square2 = (boxes2[:, :, :, :, 2] - boxes2[:, :, :, :, 0]) * (boxes2[:, :, :, :, 3] - boxes2[:, :, :, :, 1])\n\n union_square = tf.maximum(square1 + square2 - inter_square, 1e-10)\n\n return tf.clip_by_value(inter_square / union_square, 0.0, 1.0)\n\n def loss_layer(self, predicts, labels, scope = 'loss_layer'):\n with tf.variable_scope(scope):\n predict_classes = tf.reshape(predicts[:, :self.boundary1], [settings.batch_size, self.cell_size, self.cell_size, self.num_classes])\n predict_scales = tf.reshape(predicts[:, self.boundary1:self.boundary2], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell])\n predict_boxes = tf.reshape(predicts[:, self.boundary2:], [settings.batch_size, self.cell_size, self.cell_size, self.boxes_per_cell, 4])\n\n response = tf.reshape(labels[:, :, :, 0], [settings.batch_size, self.cell_size, self.cell_size, 1])\n boxes = tf.reshape(labels[:, :, :, 1:5], [settings.batch_size, self.cell_size, self.cell_size, 1, 4])\n boxes = tf.tile(boxes, [1, 1, 1, self.boxes_per_cell, 1]) / self.image_size\n classes = labels[:, :, :, 5:]\n\n offset = tf.constant(self.offset, dtype = tf.float32)\n offset = tf.reshape(offset, [1, self.cell_size, self.cell_size, self.boxes_per_cell])\n offset = tf.tile(offset, [settings.batch_size, 1, 1, 1])\n predict_boxes_tran = tf.stack([(predict_boxes[:, :, :, :, 0] + offset) / self.cell_size,\n (predict_boxes[:, :, :, :, 1] + tf.transpose(offset, (0, 2, 1, 3))) / self.cell_size,\n tf.square(predict_boxes[:, :, :, :, 2]),\n tf.square(predict_boxes[:, :, :, :, 3])])\n predict_boxes_tran = tf.transpose(predict_boxes_tran, [1, 2, 3, 4, 0])\n\n iou_predict_truth = self.calc_iou(predict_boxes_tran, boxes)\n\n object_mask = tf.reduce_max(iou_predict_truth, 3, keep_dims=True)\n object_mask = tf.cast((iou_predict_truth >= object_mask), tf.float32) * response\n\n noobject_mask = tf.ones_like(object_mask, dtype=tf.float32) - object_mask\n\n boxes_tran = tf.stack([boxes[:, :, :, :, 0] * self.cell_size - offset,\n boxes[:, :, :, :, 1] * self.cell_size - tf.transpose(offset, (0, 2, 1, 3)),\n tf.sqrt(boxes[:, :, :, :, 2]),\n tf.sqrt(boxes[:, :, :, :, 3])])\n boxes_tran = tf.transpose(boxes_tran, [1, 2, 3, 4, 0])\n\n class_delta = response * (predict_classes - classes)\n class_loss = tf.reduce_mean(tf.reduce_sum(tf.square(class_delta), axis=[1, 2, 3]), name = 'class_loss') * self.class_scale\n\n object_delta = object_mask * (predict_scales - iou_predict_truth)\n object_loss = tf.reduce_mean(tf.reduce_sum(tf.square(object_delta), axis=[1, 2, 3]), name = 'object_loss') * self.object_scale\n\n noobject_delta = noobject_mask * predict_scales\n noobject_loss = tf.reduce_mean(tf.reduce_sum(tf.square(noobject_delta), axis=[1, 2, 3]), name = 'noobject_loss') * self.no_object_scale\n\n coord_mask = tf.expand_dims(object_mask, 4)\n boxes_delta = coord_mask * (predict_boxes - boxes_tran)\n coord_loss = tf.reduce_mean(tf.reduce_sum(tf.square(boxes_delta), axis=[1, 2, 3, 4]), name = 'coord_loss') * self.coord_scale\n\n tf.contrib.losses.add_loss(class_loss)\n tf.contrib.losses.add_loss(object_loss)\n tf.contrib.losses.add_loss(noobject_loss)\n tf.contrib.losses.add_loss(coord_loss)\n\ndef 
leaky_relu(alpha):\n \n def op(inputs):\n return tf.maximum(alpha * inputs, inputs)\n return op\n",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
from yama.record import Record
class MongoStorage(object):
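    """MongoDB-backed storage for record documents and the list of root containers."""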
_collection = None
_connection = None
_root_id = None
_roots = None
def __init__(self, connection):
self._connection = connection
self._collection = connection.objects
self._roots = connection.roots
root_doc = self._roots.find_one()
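        # create the single shared roots document on first use, otherwise reuse its id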
if root_doc is None:
self._root_id = self._roots.save({'list': []})
else:
self._root_id = root_doc['_id']
def add_to_roots(self, container_id):
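        """Register container_id as a top-level (root) container."""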
self._roots.update({'_id': self._root_id}, {'$push': {'list':
container_id}})
def store_new_item(self, doc):
"""Save the new document."""
self._collection.save(doc.document)
def store_child(self, child_id, parent_id):
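        """Append child_id to the parent document's 'contents' list."""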
self._collection.update({'_id': parent_id}, {'$push': {'contents':
child_id}})
def get_root_ids(self):
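        """Return the ids of all root containers."""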
return self._roots.find_one(self._root_id)['list']
def load_one_item(self, item_id):
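        """Fetch a single document by id and wrap it in a Record."""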
return Record.from_document(self._collection.find_one(item_id))
def load_many_items(self, item_ids):
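        """Fetch several documents at once, yielding Records in the order of item_ids."""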
query = {'_id': {'$in': item_ids}}
results = dict((d['_id'], Record.from_document(d)) for d in self.
_collection.find(query))
return (results[i] for i in item_ids)
|
normal
|
{
"blob_id": "816c11717c4f26b9013f7a83e1dfb2c0578cbcf8",
"index": 1269,
"step-1": "<mask token>\n\n\nclass MongoStorage(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, connection):\n self._connection = connection\n self._collection = connection.objects\n self._roots = connection.roots\n root_doc = self._roots.find_one()\n if root_doc is None:\n self._root_id = self._roots.save({'list': []})\n else:\n self._root_id = root_doc['_id']\n\n def add_to_roots(self, container_id):\n self._roots.update({'_id': self._root_id}, {'$push': {'list':\n container_id}})\n\n def store_new_item(self, doc):\n \"\"\"Save the new document.\"\"\"\n self._collection.save(doc.document)\n\n def store_child(self, child_id, parent_id):\n self._collection.update({'_id': parent_id}, {'$push': {'contents':\n child_id}})\n <mask token>\n\n def load_one_item(self, item_id):\n return Record.from_document(self._collection.find_one(item_id))\n\n def load_many_items(self, item_ids):\n query = {'_id': {'$in': item_ids}}\n results = dict((d['_id'], Record.from_document(d)) for d in self.\n _collection.find(query))\n return (results[i] for i in item_ids)\n",
"step-2": "<mask token>\n\n\nclass MongoStorage(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, connection):\n self._connection = connection\n self._collection = connection.objects\n self._roots = connection.roots\n root_doc = self._roots.find_one()\n if root_doc is None:\n self._root_id = self._roots.save({'list': []})\n else:\n self._root_id = root_doc['_id']\n\n def add_to_roots(self, container_id):\n self._roots.update({'_id': self._root_id}, {'$push': {'list':\n container_id}})\n\n def store_new_item(self, doc):\n \"\"\"Save the new document.\"\"\"\n self._collection.save(doc.document)\n\n def store_child(self, child_id, parent_id):\n self._collection.update({'_id': parent_id}, {'$push': {'contents':\n child_id}})\n\n def get_root_ids(self):\n return self._roots.find_one(self._root_id)['list']\n\n def load_one_item(self, item_id):\n return Record.from_document(self._collection.find_one(item_id))\n\n def load_many_items(self, item_ids):\n query = {'_id': {'$in': item_ids}}\n results = dict((d['_id'], Record.from_document(d)) for d in self.\n _collection.find(query))\n return (results[i] for i in item_ids)\n",
"step-3": "<mask token>\n\n\nclass MongoStorage(object):\n _collection = None\n _connection = None\n _root_id = None\n _roots = None\n\n def __init__(self, connection):\n self._connection = connection\n self._collection = connection.objects\n self._roots = connection.roots\n root_doc = self._roots.find_one()\n if root_doc is None:\n self._root_id = self._roots.save({'list': []})\n else:\n self._root_id = root_doc['_id']\n\n def add_to_roots(self, container_id):\n self._roots.update({'_id': self._root_id}, {'$push': {'list':\n container_id}})\n\n def store_new_item(self, doc):\n \"\"\"Save the new document.\"\"\"\n self._collection.save(doc.document)\n\n def store_child(self, child_id, parent_id):\n self._collection.update({'_id': parent_id}, {'$push': {'contents':\n child_id}})\n\n def get_root_ids(self):\n return self._roots.find_one(self._root_id)['list']\n\n def load_one_item(self, item_id):\n return Record.from_document(self._collection.find_one(item_id))\n\n def load_many_items(self, item_ids):\n query = {'_id': {'$in': item_ids}}\n results = dict((d['_id'], Record.from_document(d)) for d in self.\n _collection.find(query))\n return (results[i] for i in item_ids)\n",
"step-4": "from yama.record import Record\n\n\nclass MongoStorage(object):\n _collection = None\n _connection = None\n _root_id = None\n _roots = None\n\n def __init__(self, connection):\n self._connection = connection\n self._collection = connection.objects\n self._roots = connection.roots\n root_doc = self._roots.find_one()\n if root_doc is None:\n self._root_id = self._roots.save({'list': []})\n else:\n self._root_id = root_doc['_id']\n\n def add_to_roots(self, container_id):\n self._roots.update({'_id': self._root_id}, {'$push': {'list':\n container_id}})\n\n def store_new_item(self, doc):\n \"\"\"Save the new document.\"\"\"\n self._collection.save(doc.document)\n\n def store_child(self, child_id, parent_id):\n self._collection.update({'_id': parent_id}, {'$push': {'contents':\n child_id}})\n\n def get_root_ids(self):\n return self._roots.find_one(self._root_id)['list']\n\n def load_one_item(self, item_id):\n return Record.from_document(self._collection.find_one(item_id))\n\n def load_many_items(self, item_ids):\n query = {'_id': {'$in': item_ids}}\n results = dict((d['_id'], Record.from_document(d)) for d in self.\n _collection.find(query))\n return (results[i] for i in item_ids)\n",
"step-5": null,
"step-ids": [
7,
8,
9,
10
]
}
|
[
7,
8,
9,
10
] |
'''
Copyright
This source code was created as supplementary material for the course
"Deep Learning a gyakorlatban Python és LUA alapon" (Deep Learning in Practice with Python and LUA)
taught at the Budapest University of Technology and Economics.
Course homepage: http://smartlab.tmit.bme.hu/oktatas-deep-learning
Deep Learning research: http://smartlab.tmit.bme.hu/deep-learning
The source code is protected by the GPLv3 license. In case of reuse, please
notify the author below if possible.
2018 (c) Csapó Tamás Gábor (csapot at tmit dot bme dot hu),
Gyires-Tóth Bálint, Zainkó Csaba
Links:
[hyperas] https://github.com/maxpumperla/hyperas
'''
# !pip3 install hyperas
# based on https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py
import hyperas
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, CSVLogger
import numpy as np
# do not use all GPU memory
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.datasets import cifar10
# hyperparameter optimization with hyperas (https://github.com/maxpumperla/hyperas)
# hyperas needs two functions:
# -- data() : loads the data
# -- create_model() : builds the network model
def data():
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
num_classes = 10
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# reshape for FC-DNN
x_train = np.reshape(x_train,(50000,3072)) # 32x32x3
x_test = np.reshape(x_test,(10000,3072))
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
# Normalization of pixel values (to [0-1] range)
x_train /= 255
x_test /= 255
return x_train, y_train, x_test, y_test
def create_model(x_train, y_train, x_test, y_test):
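    # the {{choice(...)}} / {{uniform(...)}} markers are hyperas templates; hyperas
    # rewrites them into hyperopt search-space samples before running the trials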
n_layer1 = {{choice([128, 256, 512])}}
n_layer2 = {{choice([128, 256, 512])}}
dropout_1 = {{uniform(0, 1)}}
dropout_2 = {{uniform(0, 1)}}
optim = {{choice(['rmsprop', 'adam', 'sgd'])}}
n_batch = {{choice([64, 128, 256])}}
print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1, dropout_2, optim, n_batch)
    # 3 x 3 x [0-1]x[0-1] x 3 x 3 = roughly 8100 combinations
model = Sequential()
model.add(Dense(n_layer1, activation='relu', input_dim=3072))
model.add(Dropout(dropout_1))
model.add(Dense(n_layer2, activation='relu'))
model.add(Dropout(dropout_2))
model.add(Dense(10, activation='softmax'))
model.compile(optimizer=optim,
loss='categorical_crossentropy',
metrics=['accuracy'])
import datetime
current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime.now())
print(current_date)
csv_name = '13_hyperas_cifar10_' + current_date + '_' + \
str(n_layer1) + '_' + str(n_layer2) + '_' + \
str(dropout_1) + '_' + str(dropout_2) + '_' + \
str(optim) + '_' + str(n_batch) + '.csv'
callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0), \
CSVLogger(csv_name, append=True, separator=';')]
result = model.fit(x_train, y_train,
batch_size=n_batch,
epochs=100,
verbose=2,
validation_data=(x_test, y_test),
callbacks=callbacks,
shuffle=True)
validation_acc = np.amax(result.history['val_acc'])
print('Best validation acc of epoch:', validation_acc)
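    # hyperopt minimizes the returned 'loss', so hand back the negated best validation accuracy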
return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform
# main hyperopt part
# the search algorithm can be:
# -- random.suggest -> random search
# -- tpe.suggest -> Tree-structured Parzen Estimator (TPE)
best_run, best_model = optim.minimize(model=create_model,
data=data,
algo=tpe.suggest,
max_evals=5,
trials=Trials())
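# best_run holds the winning hyperparameter assignment, best_model the corresponding trained Keras model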
x_train, y_train, x_test, y_test = data()
print("Evaluation of best performing model:")
print(best_model.evaluate(x_test, y_test))
print("Best performing model chosen hyper-parameters:")
print(best_run)
|
normal
|
{
"blob_id": "cc097b4d2a5a521a0adb83ca1b58470b4ce84f39",
"index": 7143,
"step-1": "<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask token>\n",
"step-2": "<mask token>\nset_session(tf.Session(config=config))\n<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask token>\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-3": "<mask token>\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\n<mask token>\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\n<mask token>\nbest_run, best_model = optim.minimize(model=create_model, data=data, algo=\n tpe.suggest, max_evals=5, trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-4": "<mask token>\nimport hyperas\nimport keras\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.optimizers import SGD\nfrom keras.callbacks import EarlyStopping, CSVLogger\nimport numpy as np\nimport tensorflow as tf\nfrom keras.backend.tensorflow_backend import set_session\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\nfrom keras.datasets import cifar10\n\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n num_classes = 10\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n x_train = np.reshape(x_train, (50000, 3072))\n x_test = np.reshape(x_test, (10000, 3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1,\n dropout_2, optim, n_batch)\n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n model.compile(optimizer=optim, loss='categorical_crossentropy', metrics\n =['accuracy'])\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime\n .now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + str(n_layer1\n ) + '_' + str(n_layer2) + '_' + str(dropout_1) + '_' + str(dropout_2\n ) + '_' + str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0),\n CSVLogger(csv_name, append=True, separator=';')]\n result = model.fit(x_train, y_train, batch_size=n_batch, epochs=100,\n verbose=2, validation_data=(x_test, y_test), callbacks=callbacks,\n shuffle=True)\n validation_acc = np.amax(result.history['val_acc'])\n print('Best validation acc of epoch:', validation_acc)\n return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\n\nfrom hyperopt import Trials, STATUS_OK, tpe\nfrom hyperas import optim\nfrom hyperas.distributions import choice, uniform\nbest_run, best_model = optim.minimize(model=create_model, data=data, algo=\n tpe.suggest, max_evals=5, trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint('Evalutation of best performing model:')\nprint(best_model.evaluate(x_test, y_test))\nprint('Best performing model chosen hyper-parameters:')\nprint(best_run)\n",
"step-5": "'''\nCopyright\n\nJelen forráskód a Budapesti Műszaki és Gazdaságtudományi Egyetemen tartott\n\"Deep Learning a gyakorlatban Python és LUA alapon\" tantárgy segédanyagaként készült.\n\nA tantárgy honlapja: http://smartlab.tmit.bme.hu/oktatas-deep-learning\nDeep Learning kutatás: http://smartlab.tmit.bme.hu/deep-learning\n\nA forráskódot GPLv3 licensz védi. Újrafelhasználás esetén lehetőség szerint kérjük\naz alábbi szerzőt értesíteni.\n\n2018 (c) Csapó Tamás Gábor (csapot kukac tmit pont bme pont hu),\nGyires-Tóth Bálint, Zainkó Csaba\n\n\nLinks:\n [hyperas] https://github.com/maxpumperla/hyperas\n'''\n\n# !pip3 install hyperas\n\n# based on https://github.com/keras-team/keras/blob/master/examples/cifar10_cnn.py\n\nimport hyperas\n\nimport keras\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.optimizers import SGD\nfrom keras.callbacks import EarlyStopping, CSVLogger\nimport numpy as np\n\n# do not use all GPU memory\nimport tensorflow as tf\nfrom keras.backend.tensorflow_backend import set_session\nconfig = tf.ConfigProto()\nconfig.gpu_options.allow_growth = True\nset_session(tf.Session(config=config))\n\n\nfrom keras.datasets import cifar10\n\n\n# hiperparaméter optimalizálás hyperas-sal (https://github.com/maxpumperla/hyperas)\n\n# a hyperas-nak kell két függvény:\n# -- data() : adatok betöltése\n# -- create_model() : hálózat modell\n\ndef data():\n (x_train, y_train), (x_test, y_test) = cifar10.load_data()\n\n num_classes = 10\n\n # Convert class vectors to binary class matrices.\n y_train = keras.utils.to_categorical(y_train, num_classes)\n y_test = keras.utils.to_categorical(y_test, num_classes)\n\n # reshape for FC-DNN\n x_train = np.reshape(x_train,(50000,3072)) # 32x32x3\n x_test = np.reshape(x_test,(10000,3072))\n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n\n # Normalization of pixel values (to [0-1] range)\n\n x_train /= 255\n x_test /= 255\n\n return x_train, y_train, x_test, y_test\n\n\ndef create_model(x_train, y_train, x_test, y_test):\n \n n_layer1 = {{choice([128, 256, 512])}}\n n_layer2 = {{choice([128, 256, 512])}}\n dropout_1 = {{uniform(0, 1)}}\n dropout_2 = {{uniform(0, 1)}}\n optim = {{choice(['rmsprop', 'adam', 'sgd'])}}\n n_batch = {{choice([64, 128, 256])}}\n \n print('Model hyperparameters: ', n_layer1, n_layer2, dropout_1, dropout_2, optim, n_batch)\n # 3 x 3 x [0-1]x[0-1] x 3 x 3 = kb 8100 kombináció\n \n model = Sequential()\n model.add(Dense(n_layer1, activation='relu', input_dim=3072))\n model.add(Dropout(dropout_1))\n model.add(Dense(n_layer2, activation='relu'))\n model.add(Dropout(dropout_2))\n model.add(Dense(10, activation='softmax'))\n \n model.compile(optimizer=optim,\n loss='categorical_crossentropy',\n metrics=['accuracy'])\n\n import datetime\n current_date = '{date:%Y-%m-%d_%H-%M-%S}'.format(date=datetime.datetime.now())\n print(current_date)\n csv_name = '13_hyperas_cifar10_' + current_date + '_' + \\\n str(n_layer1) + '_' + str(n_layer2) + '_' + \\\n str(dropout_1) + '_' + str(dropout_2) + '_' + \\\n str(optim) + '_' + str(n_batch) + '.csv'\n callbacks = [EarlyStopping(monitor='val_loss', patience=3, verbose=0), \\\n CSVLogger(csv_name, append=True, separator=';')]\n \n result = model.fit(x_train, y_train,\n batch_size=n_batch,\n epochs=100,\n verbose=2,\n validation_data=(x_test, y_test),\n callbacks=callbacks,\n shuffle=True)\n \n validation_acc = np.amax(result.history['val_acc']) \n print('Best validation acc of epoch:', validation_acc)\n return 
{'loss': -validation_acc, 'status': STATUS_OK, 'model': model}\n\nfrom hyperopt import Trials, STATUS_OK, tpe\nfrom hyperas import optim\nfrom hyperas.distributions import choice, uniform\n\n# main hyperopt part\n# az algoritmus lehet:\n# -- random.suggest -> random search\n# -- tpe.suggest -> tree parsen estimator\nbest_run, best_model = optim.minimize(model=create_model,\n data=data,\n algo=tpe.suggest,\n max_evals=5,\n trials=Trials())\nx_train, y_train, x_test, y_test = data()\nprint(\"Evalutation of best performing model:\")\nprint(best_model.evaluate(x_test, y_test))\nprint(\"Best performing model chosen hyper-parameters:\")\nprint(best_run)\n\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# app/__init__.py
import json
from flask_api import FlaskAPI, status
import graphene
from graphene import relay
from graphene_sqlalchemy import SQLAlchemyConnectionField, SQLAlchemyObjectType
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func
from flask import request, jsonify, abort, make_response
from flask_graphql import GraphQLView
from shapely.geometry import shape, Point
# local import
from instance.config import app_config
# For password hashing
from flask_bcrypt import Bcrypt
# initialize db
db = SQLAlchemy()
from app.models import Date, Area, LTESighting, SmallCell, Site, SightingsPerHourPerCountry, SightingsNew, SightingsBase, WideSighting, Journey
from app.models import Department as DepartmentModel
from app.ng_event_models import ZoneDistrict, AttractionTotal, Profile, PurchDistrict, DOWFrequency
class Department(SQLAlchemyObjectType):
class Meta:
model = DepartmentModel
interfaces = (relay.Node, )
class Query(graphene.ObjectType):
node = relay.Node.Field()
all_employees = SQLAlchemyConnectionField(Department)
def create_app(config_name):
app = FlaskAPI(__name__, instance_relative_config=True)
# overriding Werkzeug's built-in password hashing utilities using Bcrypt.
bcrypt = Bcrypt(app)
schema = graphene.Schema(query=Query)
app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql', schema=schema, graphiql=True))
app.config.from_object(app_config[config_name])
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
@app.route('/api/areas/create', methods=['POST'])
def create_areas():
# read the new area's fields from the request
name = request.data.get('name', '')
geodata = request.data.get('geodata', '')
center_lat = request.data.get('center_lat')
center_lng = request.data.get('center_lng')
zoom = request.data.get('zoom')
area = Area(name=name, geodata=geodata, center_lat=center_lat, center_lng=center_lng, zoom=zoom)
area.save()
response = jsonify({
'id': area.id,
'name': area.name,
'geodata': area.geodata,
'center_lat' : area.center_lat,
'center_lng' : area.center_lng,
'zoom' : area.zoom,
'date_created': area.date_created,
'date_modified': area.date_modified
})
return make_response(response), 201
@app.route('/api/areas/delete', methods=['POST'])
def delete_areas():
# get the id of the area to delete
id = request.data.get('id', 0)
area = Area.query.filter_by(id=id).first()
if (area is not None):
area.delete()
return make_response(jsonify({'id':id})), 200
@app.route('/api/sightingsperhour', methods=['GET'])
def get_sightingsperhour():
# get all per-hour, per-country sighting counts
sightings = SightingsPerHourPerCountry.query.all()
results = []
for sighting in sightings:
results.append({'country' : sighting.country, 'hour' : sighting.hour, 'count' : sighting.count})
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sightingsnew', methods=['POST'])
def sightingsnew():
sightings = db.session.query(SightingsBase.site_id, SightingsBase.country, func.count(SightingsBase.roundedtoday))\
.filter(SightingsBase.site_id.in_(request.data['selectedRow']))\
.filter(SightingsBase.roundedtoday.between(request.data['selectedDates'][0], request.data['selectedDates'][1]))\
.group_by(SightingsBase.site_id, SightingsBase.country)\
.order_by(SightingsBase.site_id, func.count(SightingsBase.roundedtoday).desc())
results = []
for sighting in sightings.all():
results.append({'country' : sighting.country, 'site_id' : sighting.site_id, 'count' : sighting[2]})
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/widesightingsnew', methods=['POST', 'GET'])
def widesightingsnew():
sightings = db.session.query(WideSighting.site_id, WideSighting.gender, func.count(WideSighting.gender))\
.filter(WideSighting.site_id.in_([138, 134]))\
.group_by(WideSighting.site_id, WideSighting.gender)
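# note: site ids 138 and 134 are hard-coded in this query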
results = []
for sighting in sightings.all():
#gender = sighting.gender if len(sighting.gender) else 'unknown'
results.append({'site_id' : sighting.site_id, 'gender' : sighting.gender, 'count' : sighting[2]})
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/widesightings', methods=['GET'])
def widesightings():
sightings = WideSighting.get_all()
results = []
for sighting in sightings:
results.append(sighting.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sites', methods=['GET'])
def get_sites():
# get all the sites
sites = Site.get_all()
results = []
for site in sites:
results.append(site.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/dates', methods=['GET'])
def get_dates():
# get all the dates
dates = Date.get_all()
results = []
for date in dates:
results.append(date.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/areas', methods=['GET'])
def get_areas():
# get all the areas
areas = Area.get_all()
allSmallCells = SmallCell.get_all()
results = []
for area in areas:
smallcellInArea = []
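# note: every known small cell is attached to each area; no per-area containment check is applied in this loop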
for smallcell in allSmallCells:
smallcellInArea.append(smallcell.serialise())
obj = {
'id': area.id,
'name': area.name,
'date_created': area.date_created,
'date_modified': area.date_modified,
'center_lat' : area.center_lat,
'center_lng' : area.center_lng,
'zoom' : area.zoom,
'geodata': area.geodata,
'smallcells' : smallcellInArea
}
results.append(obj)
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/smallcells', methods=['GET'])
def get_smallcells():
allSmallCells = SmallCell.query.order_by(SmallCell.id).all()
results = []
for smallcell in allSmallCells:
results.append(smallcell.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/smallcells/update', methods=['POST'])
def update_smallcell():
smallcell_id = request.data.get('id', '')
site_id = request.data.get('site_id', '')
smallcell = SmallCell.query.filter_by(id=smallcell_id).first()
smallcell.site_id = site_id
smallcell.save()
return make_response(jsonify({ 'smallcell_id' : smallcell.id, 'site_id' : smallcell.site_id })), 200
@app.route('/api/sighting/byarea/<areaid>', methods=['GET'])
def get_sighting(areaid):
import string
area = Area.query.filter_by(id=areaid).first()
if area is None : return make_response(jsonify({ 'list' : [] })), 200
sites = []
for site in Site.get_all():
if area.contains(site):
sites.append(str(site.id))
def generate_random_data(num_rows):
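# produce num_rows placeholder lat/lng points by jittering a fixed base coordinate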
import random
latitude = 51.51451110408478
longitude = -0.12620388576521444
result = []
for _ in range(num_rows):
dec_lat = random.random()/10
dec_lon = random.random()/10
result.append({'lat' : latitude + dec_lat, 'lng' : longitude + dec_lon})
return result
results = []
if (len(sites) > 0):
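# the site id list is joined straight into the SQL text, so the ids are assumed to be trusted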
for row in db.session.execute('select * from get_gender_crossfilter(ARRAY[' + ','.join(sites) + '])'):
results.append(({ 'geos': generate_random_data(5), 'gender' : row['__gender'], 'age_range' : row['__age_range'], 'timestamp' : row['__sighting_date'], 'count' : row['__count'] }))
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sighting/getgender/', methods=['POST'])
def get_gender():
site_ids = str(request.data.get('site_ids', ''))
from_sighting_date = request.data.get('selectedDates')[0]
to_sighting_date = request.data.get('selectedDates')[1]
import string
results = []
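# site_ids and the dates come from the request and are concatenated directly into the SQL string, so inputs are assumed to be trusted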
for row in db.session.execute("select * from get_gender(ARRAY[" + site_ids + "]," + "'" + from_sighting_date + "'" + "," + "'" + to_sighting_date + "'" + ")"):
results.append(({ 'site_id' : row['__site_id'], 'date_month' : row['__date_month'], 'gender' : row['__gender'], 'age_range' : row['__age_range'], 'perc_visits' : row['__perc_visits'], 'scaled_visits' : row['__scaled_visits'] }))
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sighting/getgendertotals/', methods=['POST'])
def get_gender_age_totals():
site_ids = str(request.data.get('site_ids', ''))
from_sighting_date = request.data.get('selectedDates')[0]
to_sighting_date = request.data.get('selectedDates')[1]
import string
results = []
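# same caveat as get_gender: request values are concatenated directly into the SQL string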
for row in db.session.execute("select * from get_gender_age_totals(ARRAY[" + site_ids + "]," + "'" + from_sighting_date + "'" + "," + "'" + to_sighting_date + "'" + ")"):
results.append(({ 'site_id' : row['__site_id'], 'gender' : row['__gender'], 'age_range' : row['__age_range'], '__visits' : row['__visits'] }))
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sighting', methods=['GET'])
def get_sightings():
results = []
for sighting in LTESighting.get_all():
results.append(sighting.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/sitescomparison', methods=['POST'])
def get_sitescomparison():
sightings = LTESighting.query\
.filter(LTESighting.smallcell.has(SmallCell.site_id.in_(request.data['selectedRow'])))\
.filter(LTESighting.timestamp.between(request.data['selectedDates'][0], request.data['selectedDates'][1]))
return make_response(jsonify({ 'list' : [sighting.serialise() for sighting in sightings] })), 200
@app.route('/api/sighting/bysite', methods=['GET'])
def get_sightings_by_site():
site_ids = (request.args.getlist('site_id'))
results = []
#should do this better with joins!
for sighting in LTESighting.query:
if (str(sighting.smallcell.site_id)) in site_ids : results.append(sighting.serialise())
return make_response(jsonify({ 'list' : results })), 200
@app.route('/api/origindestination/all', methods=['GET'])
def get_all():
journeys = Journey.query.all()
thing = {}
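# build a nested mapping: origin_id -> destination_id -> total journeys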
for journey in journeys:
if (journey.origin_id not in thing) :
thing[journey.origin_id] = {}
if (journey.destination_id not in thing[journey.origin_id] and journey.destination_id != journey.origin_id) :
thing[journey.origin_id][journey.destination_id] = journey.data['total']
return make_response(jsonify(thing)), 200
@app.route('/api/origindestination/<origin_id>', methods=['GET'])
def get_od(origin_id):
journeys = Journey.query.all()#.filter_by(origin_id=origin_id).all()
_j = []
for journey in journeys:
_j.append({'origin_id' : journey.origin_id, 'destination_id' : journey.destination_id, 'total' : journey.data['total']})
#_j.append({'origin_id' : journey.origin_id, 'data' : (journey.data)})
return make_response(jsonify({ 'list' : _j })), 200
@app.route('/api/ng_event/purchase/<home_district_name>/<type_visitor>', methods=['GET'])
def purchase(home_district_name, type_visitor):
days_sql = db.session.query(PurchDistrict.start_dow, func.count(PurchDistrict.start_dow))\
.group_by(PurchDistrict.start_dow)\
.filter(PurchDistrict.home_district_name.in_([home_district_name]))\
.filter(PurchDistrict.type_visitor.in_([type_visitor]))\
.order_by(func.count(PurchDistrict.start_dow).desc())\
.all()
gender_sql = db.session.query(PurchDistrict.gender, func.count(PurchDistrict.gender))\
.group_by(PurchDistrict.gender)\
.filter(PurchDistrict.home_district_name.in_([home_district_name]))\
.filter(PurchDistrict.type_visitor.in_([type_visitor])).all()
gender_age_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, func.count(PurchDistrict.gender))\
.group_by(PurchDistrict.gender, PurchDistrict.age)\
.filter(PurchDistrict.gender.isnot(None))\
.filter(PurchDistrict.age.isnot(None))\
.filter(PurchDistrict.home_district_name.in_([home_district_name]))\
.filter(PurchDistrict.type_visitor.in_([type_visitor])).all()
gender_age_rent_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict.gender))\
.group_by(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent)\
.filter(PurchDistrict.gender.isnot(None))\
.filter(PurchDistrict.age.isnot(None))\
.filter(PurchDistrict.type_visitor.in_([type_visitor])).all()
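# note: gender_age_rent_sql is queried here but not included in this endpoint's response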
days_total = sum(i[1] for i in days_sql)
gender_total = sum(i[1] for i in gender_sql)
gender_age_total = sum(i[2] for i in gender_age_sql)
days_results = []
for result in days_sql:
days_results.append({ 'start_dow' : result.start_dow, 'count' : result[1], 'percent' : float(result[1])/float(days_total), 'total' : days_total})
gender_results = []
for result in gender_sql:
gender_results.append({'gender' : result.gender, 'count' : result[1], 'percent' : float(result[1])/float(gender_total)})
gender_age_results = []
for result in gender_age_sql:
gender_age_results.append({'gender' : result.gender, 'age' : result.age, 'count' : result[2], 'percent' : float(result[2])/float(gender_age_total)})
return make_response(jsonify({'days' : days_results, 'gender' : gender_results, 'gender_age' : gender_age_results})), 200
@app.route('/api/ng_event/purchase_affluence/<type_visitor>', methods=['GET'])
def purchase_rent(type_visitor):
gender_sql = db.session.query(PurchDistrict.gender, func.count(PurchDistrict.gender))\
.group_by(PurchDistrict.gender)\
.filter(PurchDistrict.type_visitor.in_([type_visitor])).all()
gender_age_rent_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict.gender))\
.group_by(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent)\
.filter(PurchDistrict.gender.isnot(None))\
.filter(PurchDistrict.age.isnot(None))\
.filter(PurchDistrict.type_visitor.in_([type_visitor])).all()
gender_total = sum(i[1] for i in gender_sql)
gender_results = []
for result in gender_sql:
gender_results.append({'gender' : result.gender, 'count' : result[1], 'percent' : float(result[1])/float(gender_total)})
gender_age_rent_results = []
for result in gender_age_rent_sql:
gender_age_rent_results.append({'gender' : result.gender, 'age' : result.age, 'rent' : result.rent, 'count' : result[3]})
return make_response(jsonify({'gender' : gender_results, 'gender_age_rent' : gender_age_rent_results})), 200
@app.route('/api/ng_event/districts', methods=['GET'])
def districts():
home_results = []
for result in db.session.query(ZoneDistrict.home_district_code, ZoneDistrict.home_district_name, func.sum(ZoneDistrict.visitors)).group_by(ZoneDistrict.home_district_code, ZoneDistrict.home_district_name).all():
home_results.append({'district_code' : result.home_district_code, 'district_name' : result.home_district_name, 'visitors' : result[2]})
work_results = []
for result in db.session.query(ZoneDistrict.work_district_code, ZoneDistrict.work_district_name, func.sum(ZoneDistrict.visitors)).group_by(ZoneDistrict.work_district_code, ZoneDistrict.work_district_name).all():
work_results.append({'district_code' : result.work_district_code, 'district_name' : result.work_district_name, 'visitors' : result[2]})
return make_response(jsonify({'work' : { 'list' : work_results }, 'home' : { 'list' : home_results }})), 200
@app.route('/api/ng_event/attractiontotals', methods=['GET'])
def attractiontotals():
results = []
for result in db.session.query(AttractionTotal.zone_visitors, AttractionTotal.num_visitors).all():
results.append({'zone_visitors' : result.zone_visitors, 'num_visitors' : result.num_visitors})
return make_response(jsonify({'totals' : { 'list' : results }})), 200
@app.route('/api/ng_event/profiles', methods=['GET'])
def profiles():
results = []
for result in db.session.query(Profile.country, Profile.nationality, Profile.name_province, Profile.gender, Profile.age, Profile.rent, Profile.type_visitor, Profile.date, Profile.day, Profile.period, Profile.name_tur_zone).limit(10000):
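# remap zone names to district labels and full weekday names to three-letter abbreviations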
district = ''
if result.name_tur_zone == 'Zone 1' : district = 'Chamartin'
if result.name_tur_zone == 'Zone 2' : district = 'Chamberi'
if result.name_tur_zone == 'Zone 3' : district = 'Salamanca'
day = ''
if result.day == 'Monday' : day = 'Mon'
if result.day == 'Tuesday' : day = 'Tue'
if result.day == 'Wednesday' : day = 'Wed'
if result.day == 'Thursday' : day = 'Thu'
if result.day == 'Friday' : day = 'Fri'
if result.day == 'Saturday' : day = 'Sat'
if result.day == 'Sunday' : day = 'Sun'
results.append({'country' : result.country, 'nationality' : result.nationality, 'name_province' : district, 'gender' : result.gender, 'age' : result.age, 'rent' : result.rent, 'type_visitor' : result.type_visitor, 'date' : result.date, 'day' : day, 'period' : result.period, 'zone' : result.name_tur_zone })
return make_response(jsonify(results)), 200
@app.route('/api/ng_event/dowfreq', methods=['GET'])
def dowfreq():
results = []
for result in db.session.query(DOWFrequency.type_visitor, DOWFrequency.start_dow, DOWFrequency.start_hour, DOWFrequency.count).all():
results.append({'type_visitor' : result.type_visitor, 'start_dow' : result.start_dow, 'start_hour' : result.start_hour, 'count' : result.count })
return make_response(jsonify(results)), 200
return app
|
normal
|
{
"blob_id": "2f76bcfde11597f87bb9e058f7617e95c78ed383",
"index": 7950,
"step-1": "<mask token>\n\n\nclass Department(SQLAlchemyObjectType):\n\n\n class Meta:\n model = DepartmentModel\n interfaces = relay.Node,\n\n\nclass Query(graphene.ObjectType):\n node = relay.Node.Field()\n all_employees = SQLAlchemyConnectionField(Department)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Department(SQLAlchemyObjectType):\n\n\n class Meta:\n model = DepartmentModel\n interfaces = relay.Node,\n\n\nclass Query(graphene.ObjectType):\n node = relay.Node.Field()\n all_employees = SQLAlchemyConnectionField(Department)\n\n\ndef create_app(config_name):\n app = FlaskAPI(__name__, instance_relative_config=True)\n bcrypt = Bcrypt(app)\n schema = graphene.Schema(query=Query)\n app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql',\n schema=schema, graphiql=True))\n app.config.from_object(app_config[config_name])\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n db.init_app(app)\n\n @app.route('/api/areas/create', methods=['POST'])\n def create_areas():\n name = request.data.get('name', '')\n geodata = request.data.get('geodata', '')\n center_lat = request.data.get('center_lat')\n center_lng = request.data.get('center_lng')\n zoom = request.data.get('zoom')\n area = Area(name=name, geodata=geodata, center_lat=center_lat,\n center_lng=center_lng, zoom=zoom)\n area.save()\n response = jsonify({'id': area.id, 'name': area.name, 'geodata':\n area.geodata, 'center_lat': area.center_lat, 'center_lng': area\n .center_lng, 'zoom': area.zoom, 'date_created': area.\n date_created, 'date_modified': area.date_modified})\n return make_response(response), 201\n\n @app.route('/api/areas/delete', methods=['POST'])\n def delete_areas():\n id = request.data.get('id', 0)\n area = Area.query.filter_by(id=id).first()\n if area is not None:\n area.delete()\n return make_response(jsonify({'id': id})), 200\n\n @app.route('/api/sightingsperhour', methods=['GET'])\n def get_sightingsperhour():\n sightings = SightingsPerHourPerCountry.query.all()\n results = []\n for sighting in sightings:\n results.append({'country': sighting.country, 'hour': sighting.\n hour, 'count': sighting.count})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sightingsnew', methods=['POST'])\n def sightingsnew():\n sightings = db.session.query(SightingsBase.site_id, SightingsBase.\n country, func.count(SightingsBase.roundedtoday)).filter(\n SightingsBase.site_id.in_(request.data['selectedRow'])).filter(\n SightingsBase.roundedtoday.between(request.data['selectedDates'\n ][0], request.data['selectedDates'][1])).group_by(SightingsBase\n .site_id, SightingsBase.country).order_by(SightingsBase.site_id,\n func.count(SightingsBase.roundedtoday).desc())\n results = []\n for sighting in sightings.all():\n results.append({'country': sighting.country, 'site_id':\n sighting.site_id, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightingsnew', methods=['POST', 'GET'])\n def widesightingsnew():\n sightings = db.session.query(WideSighting.site_id, WideSighting.\n gender, func.count(WideSighting.gender)).filter(WideSighting.\n site_id.in_([138, 134])).group_by(WideSighting.site_id,\n WideSighting.gender)\n results = []\n for sighting in sightings.all():\n results.append({'site_id': sighting.site_id, 'gender': sighting\n .gender, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightings', methods=['GET'])\n def widesightings():\n sightings = WideSighting.get_all()\n results = []\n for sighting in sightings:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sites', methods=['GET'])\n def get_sites():\n sites = Site.get_all()\n results = []\n for site in sites:\n 
results.append(site.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/dates', methods=['GET'])\n def get_dates():\n dates = Date.get_all()\n results = []\n for date in dates:\n results.append(date.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/areas', methods=['GET'])\n def get_areas():\n areas = Area.get_all()\n allSmallCells = SmallCell.get_all()\n results = []\n for area in areas:\n smallcellInArea = []\n for smallcell in allSmallCells:\n smallcellInArea.append(smallcell.serialise())\n obj = {'id': area.id, 'name': area.name, 'date_created': area.\n date_created, 'date_modified': area.date_modified,\n 'center_lat': area.center_lat, 'center_lng': area.\n center_lng, 'zoom': area.zoom, 'geodata': area.geodata,\n 'smallcells': smallcellInArea}\n results.append(obj)\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells', methods=['GET'])\n def get_smallcells():\n allSmallCells = SmallCell.query.order_by(SmallCell.id).all()\n results = []\n for smallcell in allSmallCells:\n results.append(smallcell.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells/update', methods=['POST'])\n def update_smallcell():\n smallcell_id = request.data.get('id', '')\n site_id = request.data.get('site_id', '')\n smallcell = SmallCell.query.filter_by(id=smallcell_id).first()\n smallcell.site_id = site_id\n smallcell.save()\n return make_response(jsonify({'smallcell_id': smallcell.id,\n 'site_id': smallcell.site_id})), 200\n\n @app.route('/api/sighting/byarea/<areaid>', methods=['GET'])\n def get_sighting(areaid):\n import string\n area = Area.query.filter_by(id=areaid).first()\n if area is None:\n return make_response(jsonify({'list': []})), 200\n sites = []\n for site in Site.get_all():\n if area.contains(site):\n sites.append(str(site.id))\n\n def generate_random_data(num_rows):\n import random\n latitude = 51.51451110408478\n longitude = -0.12620388576521444\n result = []\n for _ in range(num_rows):\n dec_lat = random.random() / 10\n dec_lon = random.random() / 10\n result.append({'lat': latitude + dec_lat, 'lng': longitude +\n dec_lon})\n return result\n results = []\n if len(sites) > 0:\n for row in db.session.execute(\n 'select * from get_gender_crossfilter(ARRAY[' + ','.join(\n sites) + '])'):\n results.append({'geos': generate_random_data(5), 'gender':\n row['__gender'], 'age_range': row['__age_range'],\n 'timestamp': row['__sighting_date'], 'count': row[\n '__count']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgender/', methods=['POST'])\n def get_gender():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute('select * from get_gender(ARRAY[' +\n site_ids + '],' + \"'\" + from_sighting_date + \"'\" + ',' + \"'\" +\n to_sighting_date + \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'date_month': row[\n '__date_month'], 'gender': row['__gender'], 'age_range':\n row['__age_range'], 'perc_visits': row['__perc_visits'],\n 'scaled_visits': row['__scaled_visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgendertotals/', methods=['POST'])\n def get_gender_age_totals():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = 
request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute(\n 'select * from get_gender_age_totals(ARRAY[' + site_ids + '],' +\n \"'\" + from_sighting_date + \"'\" + ',' + \"'\" + to_sighting_date +\n \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'gender': row[\n '__gender'], 'age_range': row['__age_range'], '__visits':\n row['__visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting', methods=['GET'])\n def get_sightings():\n results = []\n for sighting in LTESighting.get_all():\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sitescomparison', methods=['POST'])\n def get_sitescomparison():\n sightings = LTESighting.query.filter(LTESighting.smallcell.has(\n SmallCell.site_id.in_(request.data['selectedRow']))).filter(\n LTESighting.timestamp.between(request.data['selectedDates'][0],\n request.data['selectedDates'][1]))\n return make_response(jsonify({'list': [sighting.serialise() for\n sighting in sightings]})), 200\n\n @app.route('/api/sighting/bysite', methods=['GET'])\n def get_sightings_by_site():\n site_ids = request.args.getlist('site_id')\n results = []\n for sighting in LTESighting.query:\n if str(sighting.smallcell.site_id) in site_ids:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/origindestination/all', methods=['GET'])\n def get_all():\n journeys = Journey.query.all()\n thing = {}\n for journey in journeys:\n if journey.origin_id not in thing:\n thing[journey.origin_id] = {}\n if journey.destination_id not in thing[journey.origin_id\n ] and journey.destination_id != journey.origin_id:\n thing[journey.origin_id][journey.destination_id\n ] = journey.data['total']\n return make_response(jsonify(thing)), 200\n\n @app.route('/api/origindestination/<origin_id>', methods=['GET'])\n def get_od(origin_id):\n journeys = Journey.query.all()\n _j = []\n for journey in journeys:\n _j.append({'origin_id': journey.origin_id, 'destination_id':\n journey.destination_id, 'total': journey.data['total']})\n return make_response(jsonify({'list': _j})), 200\n\n @app.route('/api/ng_event/purchase/<home_district_name>/<type_visitor>',\n methods=['GET'])\n def purchase(home_district_name, type_visitor):\n days_sql = db.session.query(PurchDistrict.start_dow, func.count(\n PurchDistrict.start_dow)).group_by(PurchDistrict.start_dow).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).order_by(func.\n count(PurchDistrict.start_dow).desc()).all()\n gender_sql = db.session.query(PurchDistrict.gender, func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, func.count(PurchDistrict.gender)).group_by(\n PurchDistrict.gender, PurchDistrict.age).filter(PurchDistrict.\n gender.isnot(None)).filter(PurchDistrict.age.isnot(None)).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, 
PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n days_total = sum(i[1] for i in days_sql)\n gender_total = sum(i[1] for i in gender_sql)\n gender_age_total = sum(i[2] for i in gender_age_sql)\n days_results = []\n for result in days_sql:\n days_results.append({'start_dow': result.start_dow, 'count':\n result[1], 'percent': float(result[1]) / float(days_total),\n 'total': days_total})\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_results = []\n for result in gender_age_sql:\n gender_age_results.append({'gender': result.gender, 'age':\n result.age, 'count': result[2], 'percent': float(result[2]) /\n float(gender_age_total)})\n return make_response(jsonify({'days': days_results, 'gender':\n gender_results, 'gender_age': gender_age_results})), 200\n\n @app.route('/api/ng_event/purchase_affluence/<type_visitor>', methods=[\n 'GET'])\n def purchase_rent(type_visitor):\n gender_sql = db.session.query(PurchDistrict.gender, func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n gender_total = sum(i[1] for i in gender_sql)\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_rent_results = []\n for result in gender_age_rent_sql:\n gender_age_rent_results.append({'gender': result.gender, 'age':\n result.age, 'rent': result.rent, 'count': result[3]})\n return make_response(jsonify({'gender': gender_results,\n 'gender_age_rent': gender_age_rent_results})), 200\n\n @app.route('/api/ng_event/districts', methods=['GET'])\n def districts():\n home_results = []\n for result in db.session.query(ZoneDistrict.home_district_code,\n ZoneDistrict.home_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.home_district_code, ZoneDistrict.\n home_district_name).all():\n home_results.append({'district_code': result.home_district_code,\n 'district_name': result.home_district_name, 'visitors':\n result[2]})\n work_results = []\n for result in db.session.query(ZoneDistrict.work_district_code,\n ZoneDistrict.work_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.work_district_code, ZoneDistrict.\n work_district_name).all():\n work_results.append({'district_code': result.work_district_code,\n 'district_name': result.work_district_name, 'visitors':\n result[2]})\n return make_response(jsonify({'work': {'list': work_results},\n 'home': {'list': home_results}})), 200\n\n @app.route('/api/ng_event/attractiontotals', methods=['GET'])\n def attractiontotals():\n results = []\n for result in db.session.query(AttractionTotal.zone_visitors,\n AttractionTotal.num_visitors).all():\n results.append({'zone_visitors': result.zone_visitors,\n 'num_visitors': result.num_visitors})\n return make_response(jsonify({'totals': {'list': results}})), 200\n\n 
@app.route('/api/ng_event/profiles', methods=['GET'])\n def profiles():\n results = []\n for result in db.session.query(Profile.country, Profile.nationality,\n Profile.name_province, Profile.gender, Profile.age, Profile.\n rent, Profile.type_visitor, Profile.date, Profile.day, Profile.\n period, Profile.name_tur_zone).limit(10000):\n district = ''\n if result.name_tur_zone == 'Zone 1':\n district = 'Chamartin'\n if result.name_tur_zone == 'Zone 2':\n district = 'Chamberi'\n if result.name_tur_zone == 'Zone 3':\n district = 'Salamanca'\n day = ''\n if result.day == 'Monday':\n day = 'Mon'\n if result.day == 'Tuesday':\n day = 'Tue'\n if result.day == 'Wednesday':\n day = 'Wed'\n if result.day == 'Thursday':\n day = 'Thu'\n if result.day == 'Friday':\n day = 'Fri'\n if result.day == 'Saturday':\n day = 'Sat'\n if result.day == 'Sunday':\n day = 'Sun'\n results.append({'country': result.country, 'nationality':\n result.nationality, 'name_province': district, 'gender':\n result.gender, 'age': result.age, 'rent': result.rent,\n 'type_visitor': result.type_visitor, 'date': result.date,\n 'day': day, 'period': result.period, 'zone': result.\n name_tur_zone})\n return make_response(jsonify(results)), 200\n\n @app.route('/api/ng_event/dowfreq', methods=['GET'])\n def dowfreq():\n results = []\n for result in db.session.query(DOWFrequency.type_visitor,\n DOWFrequency.start_dow, DOWFrequency.start_hour, DOWFrequency.count\n ).all():\n results.append({'type_visitor': result.type_visitor,\n 'start_dow': result.start_dow, 'start_hour': result.\n start_hour, 'count': result.count})\n return make_response(jsonify(results)), 200\n return app\n",
"step-3": "<mask token>\ndb = SQLAlchemy()\n<mask token>\n\n\nclass Department(SQLAlchemyObjectType):\n\n\n class Meta:\n model = DepartmentModel\n interfaces = relay.Node,\n\n\nclass Query(graphene.ObjectType):\n node = relay.Node.Field()\n all_employees = SQLAlchemyConnectionField(Department)\n\n\ndef create_app(config_name):\n app = FlaskAPI(__name__, instance_relative_config=True)\n bcrypt = Bcrypt(app)\n schema = graphene.Schema(query=Query)\n app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql',\n schema=schema, graphiql=True))\n app.config.from_object(app_config[config_name])\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n db.init_app(app)\n\n @app.route('/api/areas/create', methods=['POST'])\n def create_areas():\n name = request.data.get('name', '')\n geodata = request.data.get('geodata', '')\n center_lat = request.data.get('center_lat')\n center_lng = request.data.get('center_lng')\n zoom = request.data.get('zoom')\n area = Area(name=name, geodata=geodata, center_lat=center_lat,\n center_lng=center_lng, zoom=zoom)\n area.save()\n response = jsonify({'id': area.id, 'name': area.name, 'geodata':\n area.geodata, 'center_lat': area.center_lat, 'center_lng': area\n .center_lng, 'zoom': area.zoom, 'date_created': area.\n date_created, 'date_modified': area.date_modified})\n return make_response(response), 201\n\n @app.route('/api/areas/delete', methods=['POST'])\n def delete_areas():\n id = request.data.get('id', 0)\n area = Area.query.filter_by(id=id).first()\n if area is not None:\n area.delete()\n return make_response(jsonify({'id': id})), 200\n\n @app.route('/api/sightingsperhour', methods=['GET'])\n def get_sightingsperhour():\n sightings = SightingsPerHourPerCountry.query.all()\n results = []\n for sighting in sightings:\n results.append({'country': sighting.country, 'hour': sighting.\n hour, 'count': sighting.count})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sightingsnew', methods=['POST'])\n def sightingsnew():\n sightings = db.session.query(SightingsBase.site_id, SightingsBase.\n country, func.count(SightingsBase.roundedtoday)).filter(\n SightingsBase.site_id.in_(request.data['selectedRow'])).filter(\n SightingsBase.roundedtoday.between(request.data['selectedDates'\n ][0], request.data['selectedDates'][1])).group_by(SightingsBase\n .site_id, SightingsBase.country).order_by(SightingsBase.site_id,\n func.count(SightingsBase.roundedtoday).desc())\n results = []\n for sighting in sightings.all():\n results.append({'country': sighting.country, 'site_id':\n sighting.site_id, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightingsnew', methods=['POST', 'GET'])\n def widesightingsnew():\n sightings = db.session.query(WideSighting.site_id, WideSighting.\n gender, func.count(WideSighting.gender)).filter(WideSighting.\n site_id.in_([138, 134])).group_by(WideSighting.site_id,\n WideSighting.gender)\n results = []\n for sighting in sightings.all():\n results.append({'site_id': sighting.site_id, 'gender': sighting\n .gender, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightings', methods=['GET'])\n def widesightings():\n sightings = WideSighting.get_all()\n results = []\n for sighting in sightings:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sites', methods=['GET'])\n def get_sites():\n sites = Site.get_all()\n results = []\n for site in 
sites:\n results.append(site.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/dates', methods=['GET'])\n def get_dates():\n dates = Date.get_all()\n results = []\n for date in dates:\n results.append(date.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/areas', methods=['GET'])\n def get_areas():\n areas = Area.get_all()\n allSmallCells = SmallCell.get_all()\n results = []\n for area in areas:\n smallcellInArea = []\n for smallcell in allSmallCells:\n smallcellInArea.append(smallcell.serialise())\n obj = {'id': area.id, 'name': area.name, 'date_created': area.\n date_created, 'date_modified': area.date_modified,\n 'center_lat': area.center_lat, 'center_lng': area.\n center_lng, 'zoom': area.zoom, 'geodata': area.geodata,\n 'smallcells': smallcellInArea}\n results.append(obj)\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells', methods=['GET'])\n def get_smallcells():\n allSmallCells = SmallCell.query.order_by(SmallCell.id).all()\n results = []\n for smallcell in allSmallCells:\n results.append(smallcell.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells/update', methods=['POST'])\n def update_smallcell():\n smallcell_id = request.data.get('id', '')\n site_id = request.data.get('site_id', '')\n smallcell = SmallCell.query.filter_by(id=smallcell_id).first()\n smallcell.site_id = site_id\n smallcell.save()\n return make_response(jsonify({'smallcell_id': smallcell.id,\n 'site_id': smallcell.site_id})), 200\n\n @app.route('/api/sighting/byarea/<areaid>', methods=['GET'])\n def get_sighting(areaid):\n import string\n area = Area.query.filter_by(id=areaid).first()\n if area is None:\n return make_response(jsonify({'list': []})), 200\n sites = []\n for site in Site.get_all():\n if area.contains(site):\n sites.append(str(site.id))\n\n def generate_random_data(num_rows):\n import random\n latitude = 51.51451110408478\n longitude = -0.12620388576521444\n result = []\n for _ in range(num_rows):\n dec_lat = random.random() / 10\n dec_lon = random.random() / 10\n result.append({'lat': latitude + dec_lat, 'lng': longitude +\n dec_lon})\n return result\n results = []\n if len(sites) > 0:\n for row in db.session.execute(\n 'select * from get_gender_crossfilter(ARRAY[' + ','.join(\n sites) + '])'):\n results.append({'geos': generate_random_data(5), 'gender':\n row['__gender'], 'age_range': row['__age_range'],\n 'timestamp': row['__sighting_date'], 'count': row[\n '__count']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgender/', methods=['POST'])\n def get_gender():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute('select * from get_gender(ARRAY[' +\n site_ids + '],' + \"'\" + from_sighting_date + \"'\" + ',' + \"'\" +\n to_sighting_date + \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'date_month': row[\n '__date_month'], 'gender': row['__gender'], 'age_range':\n row['__age_range'], 'perc_visits': row['__perc_visits'],\n 'scaled_visits': row['__scaled_visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgendertotals/', methods=['POST'])\n def get_gender_age_totals():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = 
request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute(\n 'select * from get_gender_age_totals(ARRAY[' + site_ids + '],' +\n \"'\" + from_sighting_date + \"'\" + ',' + \"'\" + to_sighting_date +\n \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'gender': row[\n '__gender'], 'age_range': row['__age_range'], '__visits':\n row['__visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting', methods=['GET'])\n def get_sightings():\n results = []\n for sighting in LTESighting.get_all():\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sitescomparison', methods=['POST'])\n def get_sitescomparison():\n sightings = LTESighting.query.filter(LTESighting.smallcell.has(\n SmallCell.site_id.in_(request.data['selectedRow']))).filter(\n LTESighting.timestamp.between(request.data['selectedDates'][0],\n request.data['selectedDates'][1]))\n return make_response(jsonify({'list': [sighting.serialise() for\n sighting in sightings]})), 200\n\n @app.route('/api/sighting/bysite', methods=['GET'])\n def get_sightings_by_site():\n site_ids = request.args.getlist('site_id')\n results = []\n for sighting in LTESighting.query:\n if str(sighting.smallcell.site_id) in site_ids:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/origindestination/all', methods=['GET'])\n def get_all():\n journeys = Journey.query.all()\n thing = {}\n for journey in journeys:\n if journey.origin_id not in thing:\n thing[journey.origin_id] = {}\n if journey.destination_id not in thing[journey.origin_id\n ] and journey.destination_id != journey.origin_id:\n thing[journey.origin_id][journey.destination_id\n ] = journey.data['total']\n return make_response(jsonify(thing)), 200\n\n @app.route('/api/origindestination/<origin_id>', methods=['GET'])\n def get_od(origin_id):\n journeys = Journey.query.all()\n _j = []\n for journey in journeys:\n _j.append({'origin_id': journey.origin_id, 'destination_id':\n journey.destination_id, 'total': journey.data['total']})\n return make_response(jsonify({'list': _j})), 200\n\n @app.route('/api/ng_event/purchase/<home_district_name>/<type_visitor>',\n methods=['GET'])\n def purchase(home_district_name, type_visitor):\n days_sql = db.session.query(PurchDistrict.start_dow, func.count(\n PurchDistrict.start_dow)).group_by(PurchDistrict.start_dow).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).order_by(func.\n count(PurchDistrict.start_dow).desc()).all()\n gender_sql = db.session.query(PurchDistrict.gender, func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, func.count(PurchDistrict.gender)).group_by(\n PurchDistrict.gender, PurchDistrict.age).filter(PurchDistrict.\n gender.isnot(None)).filter(PurchDistrict.age.isnot(None)).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, 
PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n days_total = sum(i[1] for i in days_sql)\n gender_total = sum(i[1] for i in gender_sql)\n gender_age_total = sum(i[2] for i in gender_age_sql)\n days_results = []\n for result in days_sql:\n days_results.append({'start_dow': result.start_dow, 'count':\n result[1], 'percent': float(result[1]) / float(days_total),\n 'total': days_total})\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_results = []\n for result in gender_age_sql:\n gender_age_results.append({'gender': result.gender, 'age':\n result.age, 'count': result[2], 'percent': float(result[2]) /\n float(gender_age_total)})\n return make_response(jsonify({'days': days_results, 'gender':\n gender_results, 'gender_age': gender_age_results})), 200\n\n @app.route('/api/ng_event/purchase_affluence/<type_visitor>', methods=[\n 'GET'])\n def purchase_rent(type_visitor):\n gender_sql = db.session.query(PurchDistrict.gender, func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n gender_total = sum(i[1] for i in gender_sql)\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_rent_results = []\n for result in gender_age_rent_sql:\n gender_age_rent_results.append({'gender': result.gender, 'age':\n result.age, 'rent': result.rent, 'count': result[3]})\n return make_response(jsonify({'gender': gender_results,\n 'gender_age_rent': gender_age_rent_results})), 200\n\n @app.route('/api/ng_event/districts', methods=['GET'])\n def districts():\n home_results = []\n for result in db.session.query(ZoneDistrict.home_district_code,\n ZoneDistrict.home_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.home_district_code, ZoneDistrict.\n home_district_name).all():\n home_results.append({'district_code': result.home_district_code,\n 'district_name': result.home_district_name, 'visitors':\n result[2]})\n work_results = []\n for result in db.session.query(ZoneDistrict.work_district_code,\n ZoneDistrict.work_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.work_district_code, ZoneDistrict.\n work_district_name).all():\n work_results.append({'district_code': result.work_district_code,\n 'district_name': result.work_district_name, 'visitors':\n result[2]})\n return make_response(jsonify({'work': {'list': work_results},\n 'home': {'list': home_results}})), 200\n\n @app.route('/api/ng_event/attractiontotals', methods=['GET'])\n def attractiontotals():\n results = []\n for result in db.session.query(AttractionTotal.zone_visitors,\n AttractionTotal.num_visitors).all():\n results.append({'zone_visitors': result.zone_visitors,\n 'num_visitors': result.num_visitors})\n return make_response(jsonify({'totals': {'list': results}})), 200\n\n 
@app.route('/api/ng_event/profiles', methods=['GET'])\n def profiles():\n results = []\n for result in db.session.query(Profile.country, Profile.nationality,\n Profile.name_province, Profile.gender, Profile.age, Profile.\n rent, Profile.type_visitor, Profile.date, Profile.day, Profile.\n period, Profile.name_tur_zone).limit(10000):\n district = ''\n if result.name_tur_zone == 'Zone 1':\n district = 'Chamartin'\n if result.name_tur_zone == 'Zone 2':\n district = 'Chamberi'\n if result.name_tur_zone == 'Zone 3':\n district = 'Salamanca'\n day = ''\n if result.day == 'Monday':\n day = 'Mon'\n if result.day == 'Tuesday':\n day = 'Tue'\n if result.day == 'Wednesday':\n day = 'Wed'\n if result.day == 'Thursday':\n day = 'Thu'\n if result.day == 'Friday':\n day = 'Fri'\n if result.day == 'Saturday':\n day = 'Sat'\n if result.day == 'Sunday':\n day = 'Sun'\n results.append({'country': result.country, 'nationality':\n result.nationality, 'name_province': district, 'gender':\n result.gender, 'age': result.age, 'rent': result.rent,\n 'type_visitor': result.type_visitor, 'date': result.date,\n 'day': day, 'period': result.period, 'zone': result.\n name_tur_zone})\n return make_response(jsonify(results)), 200\n\n @app.route('/api/ng_event/dowfreq', methods=['GET'])\n def dowfreq():\n results = []\n for result in db.session.query(DOWFrequency.type_visitor,\n DOWFrequency.start_dow, DOWFrequency.start_hour, DOWFrequency.count\n ).all():\n results.append({'type_visitor': result.type_visitor,\n 'start_dow': result.start_dow, 'start_hour': result.\n start_hour, 'count': result.count})\n return make_response(jsonify(results)), 200\n return app\n",
"step-4": "import json\nfrom flask_api import FlaskAPI, status\nimport graphene\nfrom graphene import relay\nfrom graphene_sqlalchemy import SQLAlchemyConnectionField, SQLAlchemyObjectType\nfrom flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy import func\nfrom flask import request, jsonify, abort, make_response\nfrom flask_graphql import GraphQLView\nfrom shapely.geometry import shape, Point\nfrom instance.config import app_config\nfrom flask_bcrypt import Bcrypt\ndb = SQLAlchemy()\nfrom app.models import Date, Area, LTESighting, SmallCell, Site, SightingsPerHourPerCountry, SightingsNew, SightingsBase, WideSighting, Journey\nfrom app.models import Department as DepartmentModel\nfrom app.ng_event_models import ZoneDistrict, AttractionTotal, Profile, PurchDistrict, DOWFrequency\n\n\nclass Department(SQLAlchemyObjectType):\n\n\n class Meta:\n model = DepartmentModel\n interfaces = relay.Node,\n\n\nclass Query(graphene.ObjectType):\n node = relay.Node.Field()\n all_employees = SQLAlchemyConnectionField(Department)\n\n\ndef create_app(config_name):\n app = FlaskAPI(__name__, instance_relative_config=True)\n bcrypt = Bcrypt(app)\n schema = graphene.Schema(query=Query)\n app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql',\n schema=schema, graphiql=True))\n app.config.from_object(app_config[config_name])\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n db.init_app(app)\n\n @app.route('/api/areas/create', methods=['POST'])\n def create_areas():\n name = request.data.get('name', '')\n geodata = request.data.get('geodata', '')\n center_lat = request.data.get('center_lat')\n center_lng = request.data.get('center_lng')\n zoom = request.data.get('zoom')\n area = Area(name=name, geodata=geodata, center_lat=center_lat,\n center_lng=center_lng, zoom=zoom)\n area.save()\n response = jsonify({'id': area.id, 'name': area.name, 'geodata':\n area.geodata, 'center_lat': area.center_lat, 'center_lng': area\n .center_lng, 'zoom': area.zoom, 'date_created': area.\n date_created, 'date_modified': area.date_modified})\n return make_response(response), 201\n\n @app.route('/api/areas/delete', methods=['POST'])\n def delete_areas():\n id = request.data.get('id', 0)\n area = Area.query.filter_by(id=id).first()\n if area is not None:\n area.delete()\n return make_response(jsonify({'id': id})), 200\n\n @app.route('/api/sightingsperhour', methods=['GET'])\n def get_sightingsperhour():\n sightings = SightingsPerHourPerCountry.query.all()\n results = []\n for sighting in sightings:\n results.append({'country': sighting.country, 'hour': sighting.\n hour, 'count': sighting.count})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sightingsnew', methods=['POST'])\n def sightingsnew():\n sightings = db.session.query(SightingsBase.site_id, SightingsBase.\n country, func.count(SightingsBase.roundedtoday)).filter(\n SightingsBase.site_id.in_(request.data['selectedRow'])).filter(\n SightingsBase.roundedtoday.between(request.data['selectedDates'\n ][0], request.data['selectedDates'][1])).group_by(SightingsBase\n .site_id, SightingsBase.country).order_by(SightingsBase.site_id,\n func.count(SightingsBase.roundedtoday).desc())\n results = []\n for sighting in sightings.all():\n results.append({'country': sighting.country, 'site_id':\n sighting.site_id, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightingsnew', methods=['POST', 'GET'])\n def widesightingsnew():\n sightings = db.session.query(WideSighting.site_id, 
WideSighting.\n gender, func.count(WideSighting.gender)).filter(WideSighting.\n site_id.in_([138, 134])).group_by(WideSighting.site_id,\n WideSighting.gender)\n results = []\n for sighting in sightings.all():\n results.append({'site_id': sighting.site_id, 'gender': sighting\n .gender, 'count': sighting[2]})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/widesightings', methods=['GET'])\n def widesightings():\n sightings = WideSighting.get_all()\n results = []\n for sighting in sightings:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sites', methods=['GET'])\n def get_sites():\n sites = Site.get_all()\n results = []\n for site in sites:\n results.append(site.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/dates', methods=['GET'])\n def get_dates():\n dates = Date.get_all()\n results = []\n for date in dates:\n results.append(date.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/areas', methods=['GET'])\n def get_areas():\n areas = Area.get_all()\n allSmallCells = SmallCell.get_all()\n results = []\n for area in areas:\n smallcellInArea = []\n for smallcell in allSmallCells:\n smallcellInArea.append(smallcell.serialise())\n obj = {'id': area.id, 'name': area.name, 'date_created': area.\n date_created, 'date_modified': area.date_modified,\n 'center_lat': area.center_lat, 'center_lng': area.\n center_lng, 'zoom': area.zoom, 'geodata': area.geodata,\n 'smallcells': smallcellInArea}\n results.append(obj)\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells', methods=['GET'])\n def get_smallcells():\n allSmallCells = SmallCell.query.order_by(SmallCell.id).all()\n results = []\n for smallcell in allSmallCells:\n results.append(smallcell.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/smallcells/update', methods=['POST'])\n def update_smallcell():\n smallcell_id = request.data.get('id', '')\n site_id = request.data.get('site_id', '')\n smallcell = SmallCell.query.filter_by(id=smallcell_id).first()\n smallcell.site_id = site_id\n smallcell.save()\n return make_response(jsonify({'smallcell_id': smallcell.id,\n 'site_id': smallcell.site_id})), 200\n\n @app.route('/api/sighting/byarea/<areaid>', methods=['GET'])\n def get_sighting(areaid):\n import string\n area = Area.query.filter_by(id=areaid).first()\n if area is None:\n return make_response(jsonify({'list': []})), 200\n sites = []\n for site in Site.get_all():\n if area.contains(site):\n sites.append(str(site.id))\n\n def generate_random_data(num_rows):\n import random\n latitude = 51.51451110408478\n longitude = -0.12620388576521444\n result = []\n for _ in range(num_rows):\n dec_lat = random.random() / 10\n dec_lon = random.random() / 10\n result.append({'lat': latitude + dec_lat, 'lng': longitude +\n dec_lon})\n return result\n results = []\n if len(sites) > 0:\n for row in db.session.execute(\n 'select * from get_gender_crossfilter(ARRAY[' + ','.join(\n sites) + '])'):\n results.append({'geos': generate_random_data(5), 'gender':\n row['__gender'], 'age_range': row['__age_range'],\n 'timestamp': row['__sighting_date'], 'count': row[\n '__count']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgender/', methods=['POST'])\n def get_gender():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = 
request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute('select * from get_gender(ARRAY[' +\n site_ids + '],' + \"'\" + from_sighting_date + \"'\" + ',' + \"'\" +\n to_sighting_date + \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'date_month': row[\n '__date_month'], 'gender': row['__gender'], 'age_range':\n row['__age_range'], 'perc_visits': row['__perc_visits'],\n 'scaled_visits': row['__scaled_visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting/getgendertotals/', methods=['POST'])\n def get_gender_age_totals():\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n import string\n results = []\n for row in db.session.execute(\n 'select * from get_gender_age_totals(ARRAY[' + site_ids + '],' +\n \"'\" + from_sighting_date + \"'\" + ',' + \"'\" + to_sighting_date +\n \"'\" + ')'):\n results.append({'site_id': row['__site_id'], 'gender': row[\n '__gender'], 'age_range': row['__age_range'], '__visits':\n row['__visits']})\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sighting', methods=['GET'])\n def get_sightings():\n results = []\n for sighting in LTESighting.get_all():\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/sitescomparison', methods=['POST'])\n def get_sitescomparison():\n sightings = LTESighting.query.filter(LTESighting.smallcell.has(\n SmallCell.site_id.in_(request.data['selectedRow']))).filter(\n LTESighting.timestamp.between(request.data['selectedDates'][0],\n request.data['selectedDates'][1]))\n return make_response(jsonify({'list': [sighting.serialise() for\n sighting in sightings]})), 200\n\n @app.route('/api/sighting/bysite', methods=['GET'])\n def get_sightings_by_site():\n site_ids = request.args.getlist('site_id')\n results = []\n for sighting in LTESighting.query:\n if str(sighting.smallcell.site_id) in site_ids:\n results.append(sighting.serialise())\n return make_response(jsonify({'list': results})), 200\n\n @app.route('/api/origindestination/all', methods=['GET'])\n def get_all():\n journeys = Journey.query.all()\n thing = {}\n for journey in journeys:\n if journey.origin_id not in thing:\n thing[journey.origin_id] = {}\n if journey.destination_id not in thing[journey.origin_id\n ] and journey.destination_id != journey.origin_id:\n thing[journey.origin_id][journey.destination_id\n ] = journey.data['total']\n return make_response(jsonify(thing)), 200\n\n @app.route('/api/origindestination/<origin_id>', methods=['GET'])\n def get_od(origin_id):\n journeys = Journey.query.all()\n _j = []\n for journey in journeys:\n _j.append({'origin_id': journey.origin_id, 'destination_id':\n journey.destination_id, 'total': journey.data['total']})\n return make_response(jsonify({'list': _j})), 200\n\n @app.route('/api/ng_event/purchase/<home_district_name>/<type_visitor>',\n methods=['GET'])\n def purchase(home_district_name, type_visitor):\n days_sql = db.session.query(PurchDistrict.start_dow, func.count(\n PurchDistrict.start_dow)).group_by(PurchDistrict.start_dow).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).order_by(func.\n count(PurchDistrict.start_dow).desc()).all()\n gender_sql = db.session.query(PurchDistrict.gender, 
func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, func.count(PurchDistrict.gender)).group_by(\n PurchDistrict.gender, PurchDistrict.age).filter(PurchDistrict.\n gender.isnot(None)).filter(PurchDistrict.age.isnot(None)).filter(\n PurchDistrict.home_district_name.in_([home_district_name])).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n days_total = sum(i[1] for i in days_sql)\n gender_total = sum(i[1] for i in gender_sql)\n gender_age_total = sum(i[2] for i in gender_age_sql)\n days_results = []\n for result in days_sql:\n days_results.append({'start_dow': result.start_dow, 'count':\n result[1], 'percent': float(result[1]) / float(days_total),\n 'total': days_total})\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_results = []\n for result in gender_age_sql:\n gender_age_results.append({'gender': result.gender, 'age':\n result.age, 'count': result[2], 'percent': float(result[2]) /\n float(gender_age_total)})\n return make_response(jsonify({'days': days_results, 'gender':\n gender_results, 'gender_age': gender_age_results})), 200\n\n @app.route('/api/ng_event/purchase_affluence/<type_visitor>', methods=[\n 'GET'])\n def purchase_rent(type_visitor):\n gender_sql = db.session.query(PurchDistrict.gender, func.count(\n PurchDistrict.gender)).group_by(PurchDistrict.gender).filter(\n PurchDistrict.type_visitor.in_([type_visitor])).all()\n gender_age_rent_sql = db.session.query(PurchDistrict.gender,\n PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict\n .gender)).group_by(PurchDistrict.gender, PurchDistrict.age,\n PurchDistrict.rent).filter(PurchDistrict.gender.isnot(None)\n ).filter(PurchDistrict.age.isnot(None)).filter(PurchDistrict.\n type_visitor.in_([type_visitor])).all()\n gender_total = sum(i[1] for i in gender_sql)\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender': result.gender, 'count': result\n [1], 'percent': float(result[1]) / float(gender_total)})\n gender_age_rent_results = []\n for result in gender_age_rent_sql:\n gender_age_rent_results.append({'gender': result.gender, 'age':\n result.age, 'rent': result.rent, 'count': result[3]})\n return make_response(jsonify({'gender': gender_results,\n 'gender_age_rent': gender_age_rent_results})), 200\n\n @app.route('/api/ng_event/districts', methods=['GET'])\n def districts():\n home_results = []\n for result in db.session.query(ZoneDistrict.home_district_code,\n ZoneDistrict.home_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.home_district_code, ZoneDistrict.\n home_district_name).all():\n home_results.append({'district_code': result.home_district_code,\n 'district_name': result.home_district_name, 'visitors':\n result[2]})\n work_results = []\n for result in db.session.query(ZoneDistrict.work_district_code,\n 
ZoneDistrict.work_district_name, func.sum(ZoneDistrict.visitors)\n ).group_by(ZoneDistrict.work_district_code, ZoneDistrict.\n work_district_name).all():\n work_results.append({'district_code': result.work_district_code,\n 'district_name': result.work_district_name, 'visitors':\n result[2]})\n return make_response(jsonify({'work': {'list': work_results},\n 'home': {'list': home_results}})), 200\n\n @app.route('/api/ng_event/attractiontotals', methods=['GET'])\n def attractiontotals():\n results = []\n for result in db.session.query(AttractionTotal.zone_visitors,\n AttractionTotal.num_visitors).all():\n results.append({'zone_visitors': result.zone_visitors,\n 'num_visitors': result.num_visitors})\n return make_response(jsonify({'totals': {'list': results}})), 200\n\n @app.route('/api/ng_event/profiles', methods=['GET'])\n def profiles():\n results = []\n for result in db.session.query(Profile.country, Profile.nationality,\n Profile.name_province, Profile.gender, Profile.age, Profile.\n rent, Profile.type_visitor, Profile.date, Profile.day, Profile.\n period, Profile.name_tur_zone).limit(10000):\n district = ''\n if result.name_tur_zone == 'Zone 1':\n district = 'Chamartin'\n if result.name_tur_zone == 'Zone 2':\n district = 'Chamberi'\n if result.name_tur_zone == 'Zone 3':\n district = 'Salamanca'\n day = ''\n if result.day == 'Monday':\n day = 'Mon'\n if result.day == 'Tuesday':\n day = 'Tue'\n if result.day == 'Wednesday':\n day = 'Wed'\n if result.day == 'Thursday':\n day = 'Thu'\n if result.day == 'Friday':\n day = 'Fri'\n if result.day == 'Saturday':\n day = 'Sat'\n if result.day == 'Sunday':\n day = 'Sun'\n results.append({'country': result.country, 'nationality':\n result.nationality, 'name_province': district, 'gender':\n result.gender, 'age': result.age, 'rent': result.rent,\n 'type_visitor': result.type_visitor, 'date': result.date,\n 'day': day, 'period': result.period, 'zone': result.\n name_tur_zone})\n return make_response(jsonify(results)), 200\n\n @app.route('/api/ng_event/dowfreq', methods=['GET'])\n def dowfreq():\n results = []\n for result in db.session.query(DOWFrequency.type_visitor,\n DOWFrequency.start_dow, DOWFrequency.start_hour, DOWFrequency.count\n ).all():\n results.append({'type_visitor': result.type_visitor,\n 'start_dow': result.start_dow, 'start_hour': result.\n start_hour, 'count': result.count})\n return make_response(jsonify(results)), 200\n return app\n",
"step-5": "# app/__init__.py\nimport json\nfrom flask_api import FlaskAPI, status\nimport graphene\nfrom graphene import relay\nfrom graphene_sqlalchemy import SQLAlchemyConnectionField, SQLAlchemyObjectType\nfrom flask_sqlalchemy import SQLAlchemy\nfrom sqlalchemy import func\nfrom flask import request, jsonify, abort, make_response\n\nfrom flask_graphql import GraphQLView\n\nfrom shapely.geometry import shape, Point\n\n# local import\n\nfrom instance.config import app_config\n\n# For password hashing\nfrom flask_bcrypt import Bcrypt\n\n# initialize db\ndb = SQLAlchemy()\n\nfrom app.models import Date, Area, LTESighting, SmallCell, Site, SightingsPerHourPerCountry, SightingsNew, SightingsBase, WideSighting, Journey\nfrom app.models import Department as DepartmentModel\nfrom app.ng_event_models import ZoneDistrict, AttractionTotal, Profile, PurchDistrict, DOWFrequency\n\nclass Department(SQLAlchemyObjectType):\n\n class Meta:\n model = DepartmentModel\n interfaces = (relay.Node, )\n\nclass Query(graphene.ObjectType):\n node = relay.Node.Field()\n all_employees = SQLAlchemyConnectionField(Department)\n\ndef create_app(config_name):\n\n app = FlaskAPI(__name__, instance_relative_config=True)\n # overriding Werkzeugs built-in password hashing utilities using Bcrypt.\n bcrypt = Bcrypt(app)\n\n schema = graphene.Schema(query=Query)\n\n app.add_url_rule('/graphql', view_func=GraphQLView.as_view('graphql', schema=schema, graphiql=True))\n\n app.config.from_object(app_config[config_name])\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n db.init_app(app)\n\n @app.route('/api/areas/create', methods=['POST'])\n def create_areas():\n # get the access token\n\n name = request.data.get('name', '')\n geodata = request.data.get('geodata', '')\n center_lat = request.data.get('center_lat')\n center_lng = request.data.get('center_lng')\n zoom = request.data.get('zoom')\n\n area = Area(name=name, geodata=geodata, center_lat=center_lat, center_lng=center_lng, zoom=zoom)\n area.save()\n response = jsonify({\n 'id': area.id,\n 'name': area.name,\n 'geodata': area.geodata,\n 'center_lat' : area.center_lat,\n 'center_lng' : area.center_lng,\n 'zoom' : area.zoom,\n 'date_created': area.date_created,\n 'date_modified': area.date_modified\n })\n\n return make_response(response), 201\n\n @app.route('/api/areas/delete', methods=['POST'])\n def delete_areas():\n # get the access token\n id = request.data.get('id', 0)\n area = Area.query.filter_by(id=id).first()\n\n if (area is not None):\n area.delete()\n\n return make_response(jsonify({'id':id})), 200\n\n\n @app.route('/api/sightingsperhour', methods=['GET'])\n def get_sightingsperhour():\n # get all the areas\n sightings = SightingsPerHourPerCountry.query.all()\n results = []\n for sighting in sightings:\n results.append({'country' : sighting.country, 'hour' : sighting.hour, 'count' : sighting.count})\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/sightingsnew', methods=['POST'])\n def sightingsnew():\n\n sightings = db.session.query(SightingsBase.site_id, SightingsBase.country, func.count(SightingsBase.roundedtoday))\\\n .filter(SightingsBase.site_id.in_(request.data['selectedRow']))\\\n .filter(SightingsBase.roundedtoday.between(request.data['selectedDates'][0], request.data['selectedDates'][1]))\\\n .group_by(SightingsBase.site_id, SightingsBase.country)\\\n .order_by(SightingsBase.site_id, func.count(SightingsBase.roundedtoday).desc())\\\n\n results = []\n for sighting in sightings.all():\n results.append({'country' : 
sighting.country, 'site_id' : sighting.site_id, 'count' : sighting[2]})\n\n return make_response(jsonify({ 'list' : results })), 200\n\n\n @app.route('/api/widesightingsnew', methods=['POST', 'GET'])\n def widesightingsnew():\n\n sightings = db.session.query(WideSighting.site_id, WideSighting.gender, func.count(WideSighting.gender))\\\n .filter(WideSighting.site_id.in_([138, 134]))\\\n .group_by(WideSighting.site_id, WideSighting.gender)\n\n results = []\n for sighting in sightings.all():\n #gender = sighting.gender if len(sighting.gender) else 'unknown'\n results.append({'site_id' : sighting.site_id, 'gender' : sighting.gender, 'count' : sighting[2]})\n\n return make_response(jsonify({ 'list' : results })), 200\n\n\n @app.route('/api/widesightings', methods=['GET'])\n def widesightings():\n\n sightings = WideSighting.get_all()\n\n results = []\n for sighting in sightings:\n results.append(sighting.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/sites', methods=['GET'])\n def get_sites():\n # get all the areas\n sites = Site.get_all()\n results = []\n for site in sites:\n results.append(site.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/dates', methods=['GET'])\n def get_dates():\n # get all the areas\n dates = Date.get_all()\n results = []\n for date in dates:\n results.append(date.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/areas', methods=['GET'])\n def get_areas():\n # get all the areas\n areas = Area.get_all()\n allSmallCells = SmallCell.get_all()\n\n results = []\n\n for area in areas:\n\n smallcellInArea = []\n for smallcell in allSmallCells:\n smallcellInArea.append(smallcell.serialise())\n\n obj = {\n 'id': area.id,\n 'name': area.name,\n 'date_created': area.date_created,\n 'date_modified': area.date_modified,\n 'center_lat' : area.center_lat,\n 'center_lng' : area.center_lng,\n 'zoom' : area.zoom,\n 'geodata': area.geodata,\n 'smallcells' : smallcellInArea\n }\n results.append(obj)\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/smallcells', methods=['GET'])\n def get_smallcells():\n allSmallCells = SmallCell.query.order_by(SmallCell.id).all()\n\n results = []\n for smallcell in allSmallCells:\n results.append(smallcell.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/smallcells/update', methods=['POST'])\n def update_smallcell():\n smallcell_id = request.data.get('id', '')\n site_id = request.data.get('site_id', '')\n\n smallcell = SmallCell.query.filter_by(id=smallcell_id).first()\n smallcell.site_id = site_id\n smallcell.save()\n\n return make_response(jsonify({ 'smallcell_id' : smallcell.id, 'site_id' : smallcell.site_id })), 200\n\n @app.route('/api/sighting/byarea/<areaid>', methods=['GET'])\n def get_sighting(areaid):\n import string\n area = Area.query.filter_by(id=areaid).first()\n if area is None : return make_response(jsonify({ 'list' : [] })), 200\n\n sites = []\n for site in Site.get_all():\n if area.contains(site):\n sites.append(str(site.id))\n\n def generate_random_data(num_rows):\n import random\n latitude = 51.51451110408478\n longitude = -0.12620388576521444\n result = []\n for _ in range(num_rows):\n dec_lat = random.random()/10\n dec_lon = random.random()/10\n result.append({'lat' : latitude + dec_lat, 'lng' : longitude + dec_lon})\n return result\n\n results = []\n if (len(sites) > 0):\n for row in db.session.execute('select * from 
get_gender_crossfilter(ARRAY[' + ','.join(sites) + '])'):\n\n results.append(({ 'geos': generate_random_data(5), 'gender' : row['__gender'], 'age_range' : row['__age_range'], 'timestamp' : row['__sighting_date'], 'count' : row['__count'] }))\n\n return make_response(jsonify({ 'list' : results })), 200\n\n\n\n @app.route('/api/sighting/getgender/', methods=['POST'])\n def get_gender():\n\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n\n import string\n\n results = []\n\n for row in db.session.execute(\"select * from get_gender(ARRAY[\" + site_ids + \"],\" + \"'\" + from_sighting_date + \"'\" + \",\" + \"'\" + to_sighting_date + \"'\" + \")\"):\n results.append(({ 'site_id' : row['__site_id'], 'date_month' : row['__date_month'], 'gender' : row['__gender'], 'age_range' : row['__age_range'], 'perc_visits' : row['__perc_visits'], 'scaled_visits' : row['__scaled_visits'] }))\n\n return make_response(jsonify({ 'list' : results })), 200\n\n\n @app.route('/api/sighting/getgendertotals/', methods=['POST'])\n def get_gender_age_totals():\n\n site_ids = str(request.data.get('site_ids', ''))\n from_sighting_date = request.data.get('selectedDates')[0]\n to_sighting_date = request.data.get('selectedDates')[1]\n\n import string\n\n results = []\n\n for row in db.session.execute(\"select * from get_gender_age_totals(ARRAY[\" + site_ids + \"],\" + \"'\" + from_sighting_date + \"'\" + \",\" + \"'\" + to_sighting_date + \"'\" + \")\"):\n results.append(({ 'site_id' : row['__site_id'], 'gender' : row['__gender'], 'age_range' : row['__age_range'], '__visits' : row['__visits'] }))\n\n return make_response(jsonify({ 'list' : results })), 200\n\n\n\n @app.route('/api/sighting', methods=['GET'])\n def get_sightings():\n\n results = []\n for sighting in LTESighting.get_all():\n results.append(sighting.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/sitescomparison', methods=['POST'])\n def get_sitescomparison():\n\n sightings = LTESighting.query\\\n .filter(LTESighting.smallcell.has(SmallCell.site_id.in_(request.data['selectedRow'])))\\\n .filter(LTESighting.timestamp.between(request.data['selectedDates'][0], request.data['selectedDates'][1]))\n\n return make_response(jsonify({ 'list' : [sighting.serialise() for sighting in sightings] })), 200\n\n @app.route('/api/sighting/bysite', methods=['GET'])\n def get_sightings_by_site():\n\n site_ids = (request.args.getlist('site_id'))\n\n results = []\n #should do this better with joins!\n for sighting in LTESighting.query:\n if (str(sighting.smallcell.site_id)) in site_ids : results.append(sighting.serialise())\n\n return make_response(jsonify({ 'list' : results })), 200\n\n @app.route('/api/origindestination/all', methods=['GET'])\n def get_all():\n journeys = Journey.query.all()\n thing = {}\n for journey in journeys:\n if (journey.origin_id not in thing) :\n thing[journey.origin_id] = {}\n if (journey.destination_id not in thing[journey.origin_id] and journey.destination_id != journey.origin_id) :\n thing[journey.origin_id][journey.destination_id] = journey.data['total']\n\n return make_response(jsonify(thing)), 200\n\n @app.route('/api/origindestination/<origin_id>', methods=['GET'])\n def get_od(origin_id):\n journeys = Journey.query.all()#.filter_by(origin_id=origin_id).all()\n _j = []\n for journey in journeys:\n _j.append({'origin_id' : journey.origin_id, 'destination_id' : journey.destination_id, 
'total' : journey.data['total']})\n #_j.append({'origin_id' : journey.origin_id, 'data' : (journey.data)})\n\n return make_response(jsonify({ 'list' : _j })), 200\n\n @app.route('/api/ng_event/purchase/<home_district_name>/<type_visitor>', methods=['GET'])\n def purchase(home_district_name, type_visitor):\n\n days_sql = db.session.query(PurchDistrict.start_dow, func.count(PurchDistrict.start_dow))\\\n .group_by(PurchDistrict.start_dow)\\\n .filter(PurchDistrict.home_district_name.in_([home_district_name]))\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor]))\\\n .order_by(func.count(PurchDistrict.start_dow).desc())\\\n .all()\n\n gender_sql = db.session.query(PurchDistrict.gender, func.count(PurchDistrict.gender))\\\n .group_by(PurchDistrict.gender)\\\n .filter(PurchDistrict.home_district_name.in_([home_district_name]))\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor])).all()\n\n gender_age_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, func.count(PurchDistrict.gender))\\\n .group_by(PurchDistrict.gender, PurchDistrict.age)\\\n .filter(PurchDistrict.gender.isnot(None))\\\n .filter(PurchDistrict.age.isnot(None))\\\n .filter(PurchDistrict.home_district_name.in_([home_district_name]))\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor])).all()\n\n\n gender_age_rent_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict.gender))\\\n .group_by(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent)\\\n .filter(PurchDistrict.gender.isnot(None))\\\n .filter(PurchDistrict.age.isnot(None))\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor])).all()\n\n days_total = sum(i[1] for i in days_sql)\n gender_total = sum(i[1] for i in gender_sql)\n gender_age_total = sum(i[2] for i in gender_age_sql)\n\n days_results = []\n for result in days_sql:\n days_results.append({ 'start_dow' : result.start_dow, 'count' : result[1], 'percent' : float(result[1])/float(days_total), 'total' : days_total})\n\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender' : result.gender, 'count' : result[1], 'percent' : float(result[1])/float(gender_total)})\n\n gender_age_results = []\n for result in gender_age_sql:\n gender_age_results.append({'gender' : result.gender, 'age' : result.age, 'count' : result[2], 'percent' : float(result[2])/float(gender_age_total)})\n\n return make_response(jsonify({'days' : days_results, 'gender' : gender_results, 'gender_age' : gender_age_results})), 200\n\n\n @app.route('/api/ng_event/purchase_affluence/<type_visitor>', methods=['GET'])\n def purchase_rent(type_visitor):\n\n gender_sql = db.session.query(PurchDistrict.gender, func.count(PurchDistrict.gender))\\\n .group_by(PurchDistrict.gender)\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor])).all()\n\n gender_age_rent_sql = db.session.query(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent, func.count(PurchDistrict.gender))\\\n .group_by(PurchDistrict.gender, PurchDistrict.age, PurchDistrict.rent)\\\n .filter(PurchDistrict.gender.isnot(None))\\\n .filter(PurchDistrict.age.isnot(None))\\\n .filter(PurchDistrict.type_visitor.in_([type_visitor])).all()\n\n gender_total = sum(i[1] for i in gender_sql)\n\n gender_results = []\n for result in gender_sql:\n gender_results.append({'gender' : result.gender, 'count' : result[1], 'percent' : float(result[1])/float(gender_total)})\n\n gender_age_rent_results = []\n for result in gender_age_rent_sql:\n gender_age_rent_results.append({'gender' : 
result.gender, 'age' : result.age, 'rent' : result.rent, 'count' : result[3]})\n\n return make_response(jsonify({'gender' : gender_results, 'gender_age_rent' : gender_age_rent_results})), 200\n\n\n @app.route('/api/ng_event/districts', methods=['GET'])\n def districts():\n\n home_results = []\n for result in db.session.query(ZoneDistrict.home_district_code, ZoneDistrict.home_district_name, func.sum(ZoneDistrict.visitors)).group_by(ZoneDistrict.home_district_code, ZoneDistrict.home_district_name).all():\n home_results.append({'district_code' : result.home_district_code, 'district_name' : result.home_district_name, 'visitors' : result[2]})\n\n work_results = []\n for result in db.session.query(ZoneDistrict.work_district_code, ZoneDistrict.work_district_name, func.sum(ZoneDistrict.visitors)).group_by(ZoneDistrict.work_district_code, ZoneDistrict.work_district_name).all():\n work_results.append({'district_code' : result.work_district_code, 'district_name' : result.work_district_name, 'visitors' : result[2]})\n\n return make_response(jsonify({'work' : { 'list' : work_results }, 'home' : { 'list' : home_results }})), 200\n\n\n @app.route('/api/ng_event/attractiontotals', methods=['GET'])\n def attractiontotals():\n\n results = []\n for result in db.session.query(AttractionTotal.zone_visitors, AttractionTotal.num_visitors).all():\n results.append({'zone_visitors' : result.zone_visitors, 'num_visitors' : result.num_visitors})\n\n return make_response(jsonify({'totals' : { 'list' : results }})), 200\n\n\n @app.route('/api/ng_event/profiles', methods=['GET'])\n def profiles():\n\n results = []\n for result in db.session.query(Profile.country, Profile.nationality, Profile.name_province, Profile.gender, Profile.age, Profile.rent, Profile.type_visitor, Profile.date, Profile.day, Profile.period, Profile.name_tur_zone).limit(10000):\n district = ''\n if result.name_tur_zone == 'Zone 1' : district = 'Chamartin'\n if result.name_tur_zone == 'Zone 2' : district = 'Chamberi'\n if result.name_tur_zone == 'Zone 3' : district = 'Salamanca'\n\n day = ''\n if result.day == 'Monday' : day = 'Mon'\n if result.day == 'Tuesday' : day = 'Tue'\n if result.day == 'Wednesday' : day = 'Wed'\n if result.day == 'Thursday' : day = 'Thu'\n if result.day == 'Friday' : day = 'Fri'\n if result.day == 'Saturday' : day = 'Sat'\n if result.day == 'Sunday' : day = 'Sun'\n\n results.append({'country' : result.country, 'nationality' : result.nationality, 'name_province' : district, 'gender' : result.gender, 'age' : result.age, 'rent' : result.rent, 'type_visitor' : result.type_visitor, 'date' : result.date, 'day' : day, 'period' : result.period, 'zone' : result.name_tur_zone })\n\n return make_response(jsonify(results)), 200\n\n @app.route('/api/ng_event/dowfreq', methods=['GET'])\n def dowfreq():\n\n results = []\n for result in db.session.query(DOWFrequency.type_visitor, DOWFrequency.start_dow, DOWFrequency.start_hour, DOWFrequency.count).all():\n results.append({'type_visitor' : result.type_visitor, 'start_dow' : result.start_dow, 'start_hour' : result.start_hour, 'count' : result.count })\n\n return make_response(jsonify(results)), 200\n\n return app\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from collections import deque

# Board squares are numbered 1..100; each turn a six-sided die is rolled.
# `warp` maps the start square of a warp (ladder or snake) to its destination.
warp = dict()
u, v = map(int, input().split())
for _ in range(u + v):
    s, e = map(int, input().split())
    warp[s] = e

# BFS from square 1: check[i] is the minimum number of rolls needed to
# reach square i (-1 means not visited yet).
q = deque()
q.append(1)
check = [-1] * 101
check[1] = 0
while q:
    now = q.popleft()
    for k in range(1, 7):  # try every die value 1..6
        if now + k <= 100 and check[now + k] == -1:
            check[now + k] = check[now] + 1
            if now + k in warp:
                # Landing on a warp square moves the piece straight to its
                # destination, so only the destination is enqueued.
                if check[warp[now + k]] == -1:
                    check[warp[now + k]] = check[now] + 1
                    q.append(warp[now + k])
            else:
                q.append(now + k)
print(check[100])
|
normal
|
{
"blob_id": "dd792c502317288644d4bf5d247999bb08d5f401",
"index": 5369,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor _ in range(u + v):\n s, e = map(int, input().split())\n warp[s] = e\n<mask token>\nq.append(1)\n<mask token>\nwhile q:\n now = q.popleft()\n for k in range(1, 7):\n if now + k <= 100 and check[now + k] == -1:\n check[now + k] = check[now] + 1\n if now + k in warp:\n if check[warp[now + k]] == -1:\n check[warp[now + k]] = check[now] + 1\n q.append(warp[now + k])\n else:\n q.append(now + k)\nprint(check[100])\n",
"step-3": "<mask token>\nwarp = dict()\nu, v = map(int, input().split())\nfor _ in range(u + v):\n s, e = map(int, input().split())\n warp[s] = e\nq = deque()\nq.append(1)\ncheck = [-1] * 101\ncheck[1] = 0\nwhile q:\n now = q.popleft()\n for k in range(1, 7):\n if now + k <= 100 and check[now + k] == -1:\n check[now + k] = check[now] + 1\n if now + k in warp:\n if check[warp[now + k]] == -1:\n check[warp[now + k]] = check[now] + 1\n q.append(warp[now + k])\n else:\n q.append(now + k)\nprint(check[100])\n",
"step-4": "from collections import deque\nwarp = dict()\nu, v = map(int, input().split())\nfor _ in range(u + v):\n s, e = map(int, input().split())\n warp[s] = e\nq = deque()\nq.append(1)\ncheck = [-1] * 101\ncheck[1] = 0\nwhile q:\n now = q.popleft()\n for k in range(1, 7):\n if now + k <= 100 and check[now + k] == -1:\n check[now + k] = check[now] + 1\n if now + k in warp:\n if check[warp[now + k]] == -1:\n check[warp[now + k]] = check[now] + 1\n q.append(warp[now + k])\n else:\n q.append(now + k)\nprint(check[100])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
ii = [('CoolWHM.py', 1), ('SoutRD.py', 1), ('BrewDTO.py', 2), (
'FitzRNS2.py', 1), ('LyelCPG3.py', 1), ('TaylIF.py', 2)]
|
normal
|
{
"blob_id": "fbba928d51ccd08dbac25fcf2098be3a0d494d34",
"index": 6659,
"step-1": "<mask token>\n",
"step-2": "ii = [('CoolWHM.py', 1), ('SoutRD.py', 1), ('BrewDTO.py', 2), (\n 'FitzRNS2.py', 1), ('LyelCPG3.py', 1), ('TaylIF.py', 2)]\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import pkg_resources
from twisted.enterprise import adbapi
from twisted.internet import defer
# Start a logger with a namespace for a particular subsystem of our application.
from twisted.logger import Logger
log = Logger("database")
class Database:
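    """SQLite-backed persistence layer for the server, accessed
    asynchronously through Twisted's adbapi connection pool."""
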
def __init__(self, context, db_filename="database.sqlite"):
# Get full path and filename for database
session_files = context["session_files"]
db_filename = session_files.session_dir / db_filename
# Note if database already exists
database_exists = db_filename.is_file()
# Callback for every connection that is established to the
# database
def setup_connection(connection):
# Turn on foreign key constraints
cursor = connection.cursor()
cursor.execute("PRAGMA foreign_keys = ON;")
# # Turn on column names in rows
# import sqlite3
# connection.row_factory = sqlite3.Row
# Open a connection to the database. SQLite will create the file if
# it doesn't already exist.
dbpool = adbapi.ConnectionPool(
"sqlite3",
db_filename,
cp_openfun=setup_connection,
check_same_thread=False # See https://twistedmatrix.com/trac/ticket/3629
)
# If the database did not exist, initialise the database
if not database_exists:
print("Database requires initialisation")
self._db_ready = dbpool.runInteraction(self._initialise_database)
def on_success(data):
log.info("Database successfully initialised")
return dbpool
def on_error(data):
log.error(f"Failed to initialise the server's database: {data}")
reactor = context["reactor"]
reactor.stop()
self._db_ready.addCallback(on_success)
self._db_ready.addErrback(on_error)
else:
# Database exists already
self._db_ready = defer.Deferred()
self._db_ready.callback(dbpool)
# Check that database is the correct version
expected_version = 4
def check_version(cursor):
cursor.execute("SELECT version FROM Version")
row = cursor.fetchone()
if row is None:
raise Exception("No version found in Version table of database")
if row[0] == expected_version:
log.info(f"Server database version {expected_version}")
return dbpool
else:
reactor = context["reactor"]
reactor.stop()
raise Exception(f"Database version ({row[0]}) did not match expected version ({expected_version}). Terminating.")
def run_check_version(dbpool):
return dbpool.runInteraction(check_version)
d = self.get_dbpool()
d.addCallback(run_check_version)
def on_error(error):
log.error("Failed to verify the database: "+str(error))
reactor = context["reactor"]
reactor.stop()
d.addErrback(on_error)
# Initialise the database structure from instructions in file
def _initialise_database(self, cursor):
log.info("Initialising database")
initialisation_commands_filename = \
pkg_resources.resource_filename(
"singtserver",
"database.sql"
)
f = open(initialisation_commands_filename, "r")
initialisation_commands = f.read()
return cursor.executescript(initialisation_commands)
def get_dbpool(self):
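        """Returns a deferred that fires with the database connection pool
        once the database is ready."""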
d = defer.Deferred()
def db_ready(db):
d.callback(db)
return db
self._db_ready.addCallback(db_ready)
return d
def get_combination(self, track_id=None, take_ids=[]):
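        """Looks up an existing combination matching the given backing track
        and/or take ids.

        Returns a deferred that fires with the combination id, or None if no
        matching combination exists."""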
# Sanity check arguments
if (track_id is None
and len(take_ids) == 0):
raise Exception(
"Getting a combination from the database requires "+
"at least a Track ID or at least one Take ID"
)
# Get combination from database.
# See answers to https://stackoverflow.com/questions/63356820/sql-select-from-many-to-one
# and https://stackoverflow.com/a/5766293/562930
def get_combo(cursor):
if track_id is None:
assert len(take_ids) > 0
sql = (
"SELECT id\n"+
"FROM Combinations\n"+
"WHERE backingTrackId IS NULL\n"+
" AND id IN\n"+
" (SELECT combinationId\n"+
" FROM CombinationsDetail\n"+
" GROUP BY combinationId\n" +
" HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0) = ?".format(
seq=",".join(["?"]*len(take_ids))
)
)
cursor.execute(
sql,
(*take_ids, len(take_ids))
)
elif len(take_ids) == 0:
sql = (
"SELECT id\n"+
"FROM Combinations\n"+
"WHERE backingTrackId = ?\n"+
" AND NOT EXISTS\n"+
" (SELECT * \n"+
" FROM CombinationsDetail\n"+
" WHERE combinationId = Combinations.id)"
)
cursor.execute(
sql,
(track_id, )
)
else:
sql = ("SELECT id\n"+
"FROM Combinations\n"+
"WHERE backingTrackId = ?\n"+
" AND id IN\n"+
" (SELECT combinationId\n"+
" FROM CombinationsDetail\n"+
" GROUP BY combinationId\n" +
" HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0 END) = ?)").format(
seq=",".join(['?']*len(take_ids))
)
cursor.execute(
sql,
(track_id, *take_ids, len(take_ids))
)
# Although there should be at most only one combo id that
# matches the track and takes specification, even if there
# are more than one, we'll just return the first (or None
# if there aren't any).
row = cursor.fetchone()
if row is None:
return None
combo_id = row[0]
return combo_id
def when_ready(dbpool):
return dbpool.runInteraction(get_combo)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_success(data):
log.info("Successfully added combination to database; combination id: "+str(data))
return data
d.addCallback(on_success)
def on_error(error):
log.error("Failed to add combination to the database: "+str(error))
raise Exception("Failed to add combination to the database")
d.addErrback(on_error)
return d
def add_combination(self, track_id=None, take_ids=[]):
"""Adds combination into database.
Returns combo_id.
"""
log.info(f"Adding combination to database with track id = {track_id} and take_ids = {take_ids}")
# Sanity check arguments
if (track_id is None
and len(take_ids) == 0):
raise Exception(
"Adding a combination to the database requires "+
"at least a Track ID or at least one Take ID"
)
# Create combination in database
def add_combo(cursor):
# Create audio id
cursor.execute("INSERT INTO AudioIdentifiers DEFAULT VALUES")
audio_id = cursor.lastrowid
print("track_id:", track_id)
cursor.execute(
"INSERT INTO Combinations (audioId, backingTrackId) VALUES (?, ?)",
(audio_id, track_id)
)
combo_id = cursor.lastrowid
for take_id in take_ids:
cursor.execute(
"INSERT INTO CombinationsDetail (combinationId, takeId) "+
"VALUES (?,?)",
(combo_id, take_id)
)
return combo_id
def when_ready(dbpool):
return dbpool.runInteraction(add_combo)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_success(data):
log.info("Successfully added combination to database; combination id: "+str(data))
return data
def on_error(error):
log.error("Failed to add combination to the database: "+str(error))
raise Exception("Failed to add combination to the database")
d.addCallback(on_success)
d.addErrback(on_error)
return d
def get_track_audio_id(self, track_id):
"""Returns track's audio id or None."""
def execute_sql(cursor):
cursor.execute("SELECT audioId FROM BackingTracks WHERE id = ?",
(track_id,))
results = cursor.fetchone()
if results is None:
return None
else:
return results[0]
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn("Failed to get audio ID for track id ({track_id}): "+
str(error)
)
return error
d.addErrback(on_error)
return d
def get_take_audio_id(self, take_id):
"""Returns take's audio id or None."""
def execute_sql(cursor):
cursor.execute("SELECT audioId FROM Takes WHERE id = ?",
(take_id,))
results = cursor.fetchone()
if results is None:
return None
else:
return results[0]
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn("Failed to get audio ID for take id ({take_id}): "+
str(error)
)
return error
d.addErrback(on_error)
return d
def assign_participant(self, client_id, name):
"""Assigns the name to the client id."""
def execute_sql(cursor):
# First, check if the id already exists
cursor.execute(
"SELECT participantName FROM Participants WHERE id = ?",
(client_id,)
)
row = cursor.fetchone()
if row is None:
# We don't currently have this ID, insert it
cursor.execute(
"INSERT INTO Participants (id, participantName) "+
"VALUES (?, ?)",
(client_id, name)
)
return client_id
# Otherwise, a row does already exist
current_name = row[0]
if name == current_name:
# We have nothing to do, the database is already
# correct
return client_id
# Otherwise, we need to update the database
cursor.execute(
"UPDATE Participants SET participantName = ? WHERE id = ?",
(name, client_id)
)
return client_id
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn(
"Failed to add participant given name '{name}' and id '{client_id}': "+
str(error)
)
return error
d.addErrback(on_error)
return d
def get_participants(self):
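        """Returns a deferred that fires with a list of
        {"id": ..., "name": ...} dicts, one per participant."""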
def execute_sql(cursor):
cursor.execute("SELECT id, participantName FROM Participants")
rows = cursor.fetchall()
results = [{"id":id_, "name":name} for id_, name in rows]
return results
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn(
"Failed to get participant list: "+
str(error)
)
return error
d.addErrback(on_error)
return d
def get_audio_ids_from_combination_id(self, combination_id):
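        """Returns a deferred that fires with the audio ids of the
        combination's backing track (if any) followed by those of its
        takes."""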
def execute_sql(cursor):
# Get Track ID. There should be either zero or one, but
# not more.
cursor.execute(
"SELECT BackingTracks.audioId\n"+
"FROM Combinations\n"+
"LEFT JOIN BackingTracks\n"+
"ON Combinations.backingTrackId = BackingTracks.id\n"+
"WHERE combinations.id = ?",
(combination_id,)
)
rows = cursor.fetchall()
if len(rows) == 0:
# We don't have a backing track; that's fine, move on
# to the takes.
backing_audio_ids = []
elif len(rows) == 1:
# We have one backing track
row = rows[0]
audio_id = row[0]
backing_audio_ids = [audio_id]
else:
# We have more than one backing track; error.
raise Exception(
f"More than one backing track matched "+
f"combination id {combination_id}; this "+
f"shouldn't be possible"
)
# Get the Take IDs. There may be many of these. But if
# there wasn't a backing track id, then there needs to be
# at least one Take ID.
cursor.execute(
"SELECT audioId\n"+
"FROM CombinationsDetail\n"+
"LEFT JOIN Takes\n"+
"ON CombinationsDetail.id = Takes.combinationId\n"+
"WHERE CombinationsDetail.combinationId = ?",
(combination_id,)
)
rows = cursor.fetchall()
if len(rows) == 0:
                # This is only an issue if we don't have any backing
# tracks either
if len(backing_audio_ids) == 0:
raise Exception(
f"We have neither a backing track nor takes "+
f"for the given combination id ({combination_id});"+
f"this shouldn't be possible"
)
else:
# Add the Take IDs to the list
takes_audio_ids = [row[0] for row in rows]
backing_audio_ids += takes_audio_ids
return backing_audio_ids
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn(
"Failed to get backing audio ids from combination id: "+
str(error)
)
return error
d.addErrback(on_error)
return d
def add_take(self, take_name, combination_id):
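        """Creates a new, incomplete take (with its own audio id) for the
        given combination; returns a deferred that fires with the take id."""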
def execute_sql(cursor):
# Create audio id
cursor.execute("INSERT INTO AudioIdentifiers DEFAULT VALUES")
audio_id = cursor.lastrowid
# Create take
cursor.execute(
"INSERT INTO Takes (audioId, combinationId, takeName, complete) "+
"VALUES (?, ?, ?, 0)",
(audio_id, combination_id, take_name)
)
take_id = cursor.lastrowid
return take_id
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn(
"Failed to add take: "+
str(error)
)
return error
d.addErrback(on_error)
return d
def add_recording_audio_ids(self, take_id, participants):
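        """Creates an incomplete recording entry (and audio id) for each
        participant of the take; returns a deferred that fires with a dict
        mapping participant id to audio id."""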
def execute_sql(cursor):
audio_ids = {}
for participant_id in participants:
# Create audio id
cursor.execute("INSERT INTO AudioIdentifiers DEFAULT VALUES")
audio_id = cursor.lastrowid
# Add entry into Recordings
cursor.execute(
"INSERT INTO Recordings "+
"(audioId, participantId, takeId, complete) "+
"VALUES (?, ?, ?, 0)",
(audio_id, participant_id, take_id)
)
audio_ids[participant_id] = audio_id
return audio_ids
def when_ready(dbpool):
return dbpool.runInteraction(execute_sql)
d = self.get_dbpool()
d.addCallback(when_ready)
def on_error(error):
log.warn(
"Failed to add recordings for participants: "+
str(error)
)
return error
d.addErrback(on_error)
return d
|
normal
|
{
"blob_id": "45c1510d19af0979326a1b9975ec363b0b80a291",
"index": 8123,
"step-1": "<mask token>\n\n\nclass Database:\n\n def __init__(self, context, db_filename='database.sqlite'):\n session_files = context['session_files']\n db_filename = session_files.session_dir / db_filename\n database_exists = db_filename.is_file()\n\n def setup_connection(connection):\n cursor = connection.cursor()\n cursor.execute('PRAGMA foreign_keys = ON;')\n dbpool = adbapi.ConnectionPool('sqlite3', db_filename, cp_openfun=\n setup_connection, check_same_thread=False)\n if not database_exists:\n print('Database requires initialisation')\n self._db_ready = dbpool.runInteraction(self._initialise_database)\n\n def on_success(data):\n log.info('Database successfully initialised')\n return dbpool\n\n def on_error(data):\n log.error(f\"Failed to initialise the server's database: {data}\"\n )\n reactor = context['reactor']\n reactor.stop()\n self._db_ready.addCallback(on_success)\n self._db_ready.addErrback(on_error)\n else:\n self._db_ready = defer.Deferred()\n self._db_ready.callback(dbpool)\n expected_version = 4\n\n def check_version(cursor):\n cursor.execute('SELECT version FROM Version')\n row = cursor.fetchone()\n if row is None:\n raise Exception('No version found in Version table of database'\n )\n if row[0] == expected_version:\n log.info(f'Server database version {expected_version}')\n return dbpool\n else:\n reactor = context['reactor']\n reactor.stop()\n raise Exception(\n f'Database version ({row[0]}) did not match expected version ({expected_version}). Terminating.'\n )\n\n def run_check_version(dbpool):\n return dbpool.runInteraction(check_version)\n d = self.get_dbpool()\n d.addCallback(run_check_version)\n\n def on_error(error):\n log.error('Failed to verify the database: ' + str(error))\n reactor = context['reactor']\n reactor.stop()\n d.addErrback(on_error)\n\n def _initialise_database(self, cursor):\n log.info('Initialising database')\n initialisation_commands_filename = pkg_resources.resource_filename(\n 'singtserver', 'database.sql')\n f = open(initialisation_commands_filename, 'r')\n initialisation_commands = f.read()\n return cursor.executescript(initialisation_commands)\n <mask token>\n <mask token>\n <mask token>\n\n def get_track_audio_id(self, track_id):\n \"\"\"Returns track's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM BackingTracks WHERE id = ?',\n (track_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for track id ({track_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n <mask token>\n\n def assign_participant(self, client_id, name):\n \"\"\"Assigns the name to the client id.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute(\n 'SELECT participantName FROM Participants WHERE id = ?', (\n client_id,))\n row = cursor.fetchone()\n if row is None:\n cursor.execute(\n 'INSERT INTO Participants (id, participantName) ' +\n 'VALUES (?, ?)', (client_id, name))\n return client_id\n current_name = row[0]\n if name == current_name:\n return client_id\n cursor.execute(\n 'UPDATE Participants SET participantName = ? 
WHERE id = ?',\n (name, client_id))\n return client_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add participant given name '{name}' and id '{client_id}': \"\n + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_participants(self):\n\n def execute_sql(cursor):\n cursor.execute('SELECT id, participantName FROM Participants')\n rows = cursor.fetchall()\n results = [{'id': id_, 'name': name} for id_, name in rows]\n return results\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get participant list: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_audio_ids_from_combination_id(self, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('SELECT BackingTracks.audioId\\n' +\n 'FROM Combinations\\n' + \"\"\"LEFT JOIN BackingTracks\n\"\"\" +\n 'ON Combinations.backingTrackId = BackingTracks.id\\n' +\n 'WHERE combinations.id = ?', (combination_id,))\n rows = cursor.fetchall()\n if len(rows) == 0:\n backing_audio_ids = []\n elif len(rows) == 1:\n row = rows[0]\n audio_id = row[0]\n backing_audio_ids = [audio_id]\n else:\n raise Exception(f'More than one backing track matched ' +\n f'combination id {combination_id}; this ' +\n f\"shouldn't be possible\")\n cursor.execute('SELECT audioId\\n' + 'FROM CombinationsDetail\\n' +\n 'LEFT JOIN Takes\\n' +\n \"\"\"ON CombinationsDetail.id = Takes.combinationId\n\"\"\" +\n 'WHERE CombinationsDetail.combinationId = ?', (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n if len(backing_audio_ids) == 0:\n raise Exception(\n f'We have neither a backing track nor takes ' +\n f'for the given combination id ({combination_id});' +\n f\"this shouldn't be possible\")\n else:\n takes_audio_ids = [row[0] for row in rows]\n backing_audio_ids += takes_audio_ids\n return backing_audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n 'Failed to get backing audio ids from combination id: ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n <mask token>\n\n def add_recording_audio_ids(self, take_id, participants):\n\n def execute_sql(cursor):\n audio_ids = {}\n for participant_id in participants:\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute('INSERT INTO Recordings ' +\n '(audioId, participantId, takeId, complete) ' +\n 'VALUES (?, ?, ?, 0)', (audio_id, participant_id, take_id))\n audio_ids[participant_id] = audio_id\n return audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add recordings for participants: ' + str(error)\n )\n return error\n d.addErrback(on_error)\n return d\n",
"step-2": "<mask token>\n\n\nclass Database:\n\n def __init__(self, context, db_filename='database.sqlite'):\n session_files = context['session_files']\n db_filename = session_files.session_dir / db_filename\n database_exists = db_filename.is_file()\n\n def setup_connection(connection):\n cursor = connection.cursor()\n cursor.execute('PRAGMA foreign_keys = ON;')\n dbpool = adbapi.ConnectionPool('sqlite3', db_filename, cp_openfun=\n setup_connection, check_same_thread=False)\n if not database_exists:\n print('Database requires initialisation')\n self._db_ready = dbpool.runInteraction(self._initialise_database)\n\n def on_success(data):\n log.info('Database successfully initialised')\n return dbpool\n\n def on_error(data):\n log.error(f\"Failed to initialise the server's database: {data}\"\n )\n reactor = context['reactor']\n reactor.stop()\n self._db_ready.addCallback(on_success)\n self._db_ready.addErrback(on_error)\n else:\n self._db_ready = defer.Deferred()\n self._db_ready.callback(dbpool)\n expected_version = 4\n\n def check_version(cursor):\n cursor.execute('SELECT version FROM Version')\n row = cursor.fetchone()\n if row is None:\n raise Exception('No version found in Version table of database'\n )\n if row[0] == expected_version:\n log.info(f'Server database version {expected_version}')\n return dbpool\n else:\n reactor = context['reactor']\n reactor.stop()\n raise Exception(\n f'Database version ({row[0]}) did not match expected version ({expected_version}). Terminating.'\n )\n\n def run_check_version(dbpool):\n return dbpool.runInteraction(check_version)\n d = self.get_dbpool()\n d.addCallback(run_check_version)\n\n def on_error(error):\n log.error('Failed to verify the database: ' + str(error))\n reactor = context['reactor']\n reactor.stop()\n d.addErrback(on_error)\n\n def _initialise_database(self, cursor):\n log.info('Initialising database')\n initialisation_commands_filename = pkg_resources.resource_filename(\n 'singtserver', 'database.sql')\n f = open(initialisation_commands_filename, 'r')\n initialisation_commands = f.read()\n return cursor.executescript(initialisation_commands)\n\n def get_dbpool(self):\n d = defer.Deferred()\n\n def db_ready(db):\n d.callback(db)\n return db\n self._db_ready.addCallback(db_ready)\n return d\n <mask token>\n <mask token>\n\n def get_track_audio_id(self, track_id):\n \"\"\"Returns track's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM BackingTracks WHERE id = ?',\n (track_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for track id ({track_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n <mask token>\n\n def assign_participant(self, client_id, name):\n \"\"\"Assigns the name to the client id.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute(\n 'SELECT participantName FROM Participants WHERE id = ?', (\n client_id,))\n row = cursor.fetchone()\n if row is None:\n cursor.execute(\n 'INSERT INTO Participants (id, participantName) ' +\n 'VALUES (?, ?)', (client_id, name))\n return client_id\n current_name = row[0]\n if name == current_name:\n return client_id\n cursor.execute(\n 'UPDATE Participants SET participantName = ? 
WHERE id = ?',\n (name, client_id))\n return client_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add participant given name '{name}' and id '{client_id}': \"\n + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_participants(self):\n\n def execute_sql(cursor):\n cursor.execute('SELECT id, participantName FROM Participants')\n rows = cursor.fetchall()\n results = [{'id': id_, 'name': name} for id_, name in rows]\n return results\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get participant list: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_audio_ids_from_combination_id(self, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('SELECT BackingTracks.audioId\\n' +\n 'FROM Combinations\\n' + \"\"\"LEFT JOIN BackingTracks\n\"\"\" +\n 'ON Combinations.backingTrackId = BackingTracks.id\\n' +\n 'WHERE combinations.id = ?', (combination_id,))\n rows = cursor.fetchall()\n if len(rows) == 0:\n backing_audio_ids = []\n elif len(rows) == 1:\n row = rows[0]\n audio_id = row[0]\n backing_audio_ids = [audio_id]\n else:\n raise Exception(f'More than one backing track matched ' +\n f'combination id {combination_id}; this ' +\n f\"shouldn't be possible\")\n cursor.execute('SELECT audioId\\n' + 'FROM CombinationsDetail\\n' +\n 'LEFT JOIN Takes\\n' +\n \"\"\"ON CombinationsDetail.id = Takes.combinationId\n\"\"\" +\n 'WHERE CombinationsDetail.combinationId = ?', (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n if len(backing_audio_ids) == 0:\n raise Exception(\n f'We have neither a backing track nor takes ' +\n f'for the given combination id ({combination_id});' +\n f\"this shouldn't be possible\")\n else:\n takes_audio_ids = [row[0] for row in rows]\n backing_audio_ids += takes_audio_ids\n return backing_audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n 'Failed to get backing audio ids from combination id: ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n <mask token>\n\n def add_recording_audio_ids(self, take_id, participants):\n\n def execute_sql(cursor):\n audio_ids = {}\n for participant_id in participants:\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute('INSERT INTO Recordings ' +\n '(audioId, participantId, takeId, complete) ' +\n 'VALUES (?, ?, ?, 0)', (audio_id, participant_id, take_id))\n audio_ids[participant_id] = audio_id\n return audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add recordings for participants: ' + str(error)\n )\n return error\n d.addErrback(on_error)\n return d\n",
"step-3": "<mask token>\n\n\nclass Database:\n\n def __init__(self, context, db_filename='database.sqlite'):\n session_files = context['session_files']\n db_filename = session_files.session_dir / db_filename\n database_exists = db_filename.is_file()\n\n def setup_connection(connection):\n cursor = connection.cursor()\n cursor.execute('PRAGMA foreign_keys = ON;')\n dbpool = adbapi.ConnectionPool('sqlite3', db_filename, cp_openfun=\n setup_connection, check_same_thread=False)\n if not database_exists:\n print('Database requires initialisation')\n self._db_ready = dbpool.runInteraction(self._initialise_database)\n\n def on_success(data):\n log.info('Database successfully initialised')\n return dbpool\n\n def on_error(data):\n log.error(f\"Failed to initialise the server's database: {data}\"\n )\n reactor = context['reactor']\n reactor.stop()\n self._db_ready.addCallback(on_success)\n self._db_ready.addErrback(on_error)\n else:\n self._db_ready = defer.Deferred()\n self._db_ready.callback(dbpool)\n expected_version = 4\n\n def check_version(cursor):\n cursor.execute('SELECT version FROM Version')\n row = cursor.fetchone()\n if row is None:\n raise Exception('No version found in Version table of database'\n )\n if row[0] == expected_version:\n log.info(f'Server database version {expected_version}')\n return dbpool\n else:\n reactor = context['reactor']\n reactor.stop()\n raise Exception(\n f'Database version ({row[0]}) did not match expected version ({expected_version}). Terminating.'\n )\n\n def run_check_version(dbpool):\n return dbpool.runInteraction(check_version)\n d = self.get_dbpool()\n d.addCallback(run_check_version)\n\n def on_error(error):\n log.error('Failed to verify the database: ' + str(error))\n reactor = context['reactor']\n reactor.stop()\n d.addErrback(on_error)\n\n def _initialise_database(self, cursor):\n log.info('Initialising database')\n initialisation_commands_filename = pkg_resources.resource_filename(\n 'singtserver', 'database.sql')\n f = open(initialisation_commands_filename, 'r')\n initialisation_commands = f.read()\n return cursor.executescript(initialisation_commands)\n\n def get_dbpool(self):\n d = defer.Deferred()\n\n def db_ready(db):\n d.callback(db)\n return db\n self._db_ready.addCallback(db_ready)\n return d\n\n def get_combination(self, track_id=None, take_ids=[]):\n if track_id is None and len(take_ids) == 0:\n raise Exception(\n 'Getting a combination from the database requires ' +\n 'at least a Track ID or at least one Take ID')\n\n def get_combo(cursor):\n if track_id is None:\n assert len(take_ids) > 0\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n \"\"\"WHERE backingTrackId IS NULL\n\"\"\" +\n ' AND id IN\\n' + ' (SELECT combinationId\\n' +\n ' FROM CombinationsDetail\\n' +\n \"\"\" GROUP BY combinationId\n\"\"\" +\n ' HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0) = ?'\n .format(seq=','.join(['?'] * len(take_ids))))\n cursor.execute(sql, (*take_ids, len(take_ids)))\n elif len(take_ids) == 0:\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n 'WHERE backingTrackId = ?\\n' +\n \"\"\" AND NOT EXISTS\n\"\"\" + ' (SELECT * \\n' +\n ' FROM CombinationsDetail\\n' +\n ' WHERE combinationId = Combinations.id)')\n cursor.execute(sql, (track_id,))\n else:\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n 'WHERE backingTrackId = ?\\n' + ' AND id IN\\n' +\n \"\"\" (SELECT combinationId\n\"\"\" +\n ' FROM CombinationsDetail\\n' +\n ' GROUP BY combinationId\\n' +\n ' HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0 END) = ?)'\n 
).format(seq=','.join(['?'] * len(take_ids)))\n cursor.execute(sql, (track_id, *take_ids, len(take_ids)))\n row = cursor.fetchone()\n if row is None:\n return None\n combo_id = row[0]\n return combo_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(get_combo)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_success(data):\n log.info(\n 'Successfully added combination to database; combination id: '\n + str(data))\n return data\n d.addCallback(on_success)\n\n def on_error(error):\n log.error('Failed to add combination to the database: ' + str(\n error))\n raise Exception('Failed to add combination to the database')\n d.addErrback(on_error)\n return d\n <mask token>\n\n def get_track_audio_id(self, track_id):\n \"\"\"Returns track's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM BackingTracks WHERE id = ?',\n (track_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for track id ({track_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_take_audio_id(self, take_id):\n \"\"\"Returns take's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM Takes WHERE id = ?', (take_id,)\n )\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for take id ({take_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def assign_participant(self, client_id, name):\n \"\"\"Assigns the name to the client id.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute(\n 'SELECT participantName FROM Participants WHERE id = ?', (\n client_id,))\n row = cursor.fetchone()\n if row is None:\n cursor.execute(\n 'INSERT INTO Participants (id, participantName) ' +\n 'VALUES (?, ?)', (client_id, name))\n return client_id\n current_name = row[0]\n if name == current_name:\n return client_id\n cursor.execute(\n 'UPDATE Participants SET participantName = ? 
WHERE id = ?',\n (name, client_id))\n return client_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add participant given name '{name}' and id '{client_id}': \"\n + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_participants(self):\n\n def execute_sql(cursor):\n cursor.execute('SELECT id, participantName FROM Participants')\n rows = cursor.fetchall()\n results = [{'id': id_, 'name': name} for id_, name in rows]\n return results\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get participant list: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_audio_ids_from_combination_id(self, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('SELECT BackingTracks.audioId\\n' +\n 'FROM Combinations\\n' + \"\"\"LEFT JOIN BackingTracks\n\"\"\" +\n 'ON Combinations.backingTrackId = BackingTracks.id\\n' +\n 'WHERE combinations.id = ?', (combination_id,))\n rows = cursor.fetchall()\n if len(rows) == 0:\n backing_audio_ids = []\n elif len(rows) == 1:\n row = rows[0]\n audio_id = row[0]\n backing_audio_ids = [audio_id]\n else:\n raise Exception(f'More than one backing track matched ' +\n f'combination id {combination_id}; this ' +\n f\"shouldn't be possible\")\n cursor.execute('SELECT audioId\\n' + 'FROM CombinationsDetail\\n' +\n 'LEFT JOIN Takes\\n' +\n \"\"\"ON CombinationsDetail.id = Takes.combinationId\n\"\"\" +\n 'WHERE CombinationsDetail.combinationId = ?', (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n if len(backing_audio_ids) == 0:\n raise Exception(\n f'We have neither a backing track nor takes ' +\n f'for the given combination id ({combination_id});' +\n f\"this shouldn't be possible\")\n else:\n takes_audio_ids = [row[0] for row in rows]\n backing_audio_ids += takes_audio_ids\n return backing_audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n 'Failed to get backing audio ids from combination id: ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def add_take(self, take_name, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute(\n 'INSERT INTO Takes (audioId, combinationId, takeName, complete) '\n + 'VALUES (?, ?, ?, 0)', (audio_id, combination_id, take_name)\n )\n take_id = cursor.lastrowid\n return take_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add take: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def add_recording_audio_ids(self, take_id, participants):\n\n def execute_sql(cursor):\n audio_ids = {}\n for participant_id in participants:\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute('INSERT INTO Recordings ' +\n '(audioId, participantId, takeId, complete) ' +\n 'VALUES (?, ?, ?, 0)', (audio_id, participant_id, take_id))\n audio_ids[participant_id] = audio_id\n return audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n 
d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add recordings for participants: ' + str(error)\n )\n return error\n d.addErrback(on_error)\n return d\n",
"step-4": "import pkg_resources\nfrom twisted.enterprise import adbapi\nfrom twisted.internet import defer\nfrom twisted.logger import Logger\nlog = Logger('database')\n\n\nclass Database:\n\n def __init__(self, context, db_filename='database.sqlite'):\n session_files = context['session_files']\n db_filename = session_files.session_dir / db_filename\n database_exists = db_filename.is_file()\n\n def setup_connection(connection):\n cursor = connection.cursor()\n cursor.execute('PRAGMA foreign_keys = ON;')\n dbpool = adbapi.ConnectionPool('sqlite3', db_filename, cp_openfun=\n setup_connection, check_same_thread=False)\n if not database_exists:\n print('Database requires initialisation')\n self._db_ready = dbpool.runInteraction(self._initialise_database)\n\n def on_success(data):\n log.info('Database successfully initialised')\n return dbpool\n\n def on_error(data):\n log.error(f\"Failed to initialise the server's database: {data}\"\n )\n reactor = context['reactor']\n reactor.stop()\n self._db_ready.addCallback(on_success)\n self._db_ready.addErrback(on_error)\n else:\n self._db_ready = defer.Deferred()\n self._db_ready.callback(dbpool)\n expected_version = 4\n\n def check_version(cursor):\n cursor.execute('SELECT version FROM Version')\n row = cursor.fetchone()\n if row is None:\n raise Exception('No version found in Version table of database'\n )\n if row[0] == expected_version:\n log.info(f'Server database version {expected_version}')\n return dbpool\n else:\n reactor = context['reactor']\n reactor.stop()\n raise Exception(\n f'Database version ({row[0]}) did not match expected version ({expected_version}). Terminating.'\n )\n\n def run_check_version(dbpool):\n return dbpool.runInteraction(check_version)\n d = self.get_dbpool()\n d.addCallback(run_check_version)\n\n def on_error(error):\n log.error('Failed to verify the database: ' + str(error))\n reactor = context['reactor']\n reactor.stop()\n d.addErrback(on_error)\n\n def _initialise_database(self, cursor):\n log.info('Initialising database')\n initialisation_commands_filename = pkg_resources.resource_filename(\n 'singtserver', 'database.sql')\n f = open(initialisation_commands_filename, 'r')\n initialisation_commands = f.read()\n return cursor.executescript(initialisation_commands)\n\n def get_dbpool(self):\n d = defer.Deferred()\n\n def db_ready(db):\n d.callback(db)\n return db\n self._db_ready.addCallback(db_ready)\n return d\n\n def get_combination(self, track_id=None, take_ids=[]):\n if track_id is None and len(take_ids) == 0:\n raise Exception(\n 'Getting a combination from the database requires ' +\n 'at least a Track ID or at least one Take ID')\n\n def get_combo(cursor):\n if track_id is None:\n assert len(take_ids) > 0\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n \"\"\"WHERE backingTrackId IS NULL\n\"\"\" +\n ' AND id IN\\n' + ' (SELECT combinationId\\n' +\n ' FROM CombinationsDetail\\n' +\n \"\"\" GROUP BY combinationId\n\"\"\" +\n ' HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0) = ?'\n .format(seq=','.join(['?'] * len(take_ids))))\n cursor.execute(sql, (*take_ids, len(take_ids)))\n elif len(take_ids) == 0:\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n 'WHERE backingTrackId = ?\\n' +\n \"\"\" AND NOT EXISTS\n\"\"\" + ' (SELECT * \\n' +\n ' FROM CombinationsDetail\\n' +\n ' WHERE combinationId = Combinations.id)')\n cursor.execute(sql, (track_id,))\n else:\n sql = ('SELECT id\\n' + 'FROM Combinations\\n' +\n 'WHERE backingTrackId = ?\\n' + ' AND id IN\\n' +\n \"\"\" (SELECT combinationId\n\"\"\" +\n ' 
FROM CombinationsDetail\\n' +\n ' GROUP BY combinationId\\n' +\n ' HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0 END) = ?)'\n ).format(seq=','.join(['?'] * len(take_ids)))\n cursor.execute(sql, (track_id, *take_ids, len(take_ids)))\n row = cursor.fetchone()\n if row is None:\n return None\n combo_id = row[0]\n return combo_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(get_combo)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_success(data):\n log.info(\n 'Successfully added combination to database; combination id: '\n + str(data))\n return data\n d.addCallback(on_success)\n\n def on_error(error):\n log.error('Failed to add combination to the database: ' + str(\n error))\n raise Exception('Failed to add combination to the database')\n d.addErrback(on_error)\n return d\n\n def add_combination(self, track_id=None, take_ids=[]):\n \"\"\"Adds combination into database.\n\n Returns combo_id.\n \"\"\"\n log.info(\n f'Adding combination to database with track id = {track_id} and take_ids = {take_ids}'\n )\n if track_id is None and len(take_ids) == 0:\n raise Exception(\n 'Adding a combination to the database requires ' +\n 'at least a Track ID or at least one Take ID')\n\n def add_combo(cursor):\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n print('track_id:', track_id)\n cursor.execute(\n 'INSERT INTO Combinations (audioId, backingTrackId) VALUES (?, ?)'\n , (audio_id, track_id))\n combo_id = cursor.lastrowid\n for take_id in take_ids:\n cursor.execute(\n 'INSERT INTO CombinationsDetail (combinationId, takeId) ' +\n 'VALUES (?,?)', (combo_id, take_id))\n return combo_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(add_combo)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_success(data):\n log.info(\n 'Successfully added combination to database; combination id: '\n + str(data))\n return data\n\n def on_error(error):\n log.error('Failed to add combination to the database: ' + str(\n error))\n raise Exception('Failed to add combination to the database')\n d.addCallback(on_success)\n d.addErrback(on_error)\n return d\n\n def get_track_audio_id(self, track_id):\n \"\"\"Returns track's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM BackingTracks WHERE id = ?',\n (track_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for track id ({track_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_take_audio_id(self, take_id):\n \"\"\"Returns take's audio id or None.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute('SELECT audioId FROM Takes WHERE id = ?', (take_id,)\n )\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get audio ID for take id ({take_id}): ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def assign_participant(self, client_id, name):\n \"\"\"Assigns the name to the client id.\"\"\"\n\n def execute_sql(cursor):\n cursor.execute(\n 'SELECT participantName FROM Participants WHERE id = ?', (\n client_id,))\n row = cursor.fetchone()\n if 
row is None:\n cursor.execute(\n 'INSERT INTO Participants (id, participantName) ' +\n 'VALUES (?, ?)', (client_id, name))\n return client_id\n current_name = row[0]\n if name == current_name:\n return client_id\n cursor.execute(\n 'UPDATE Participants SET participantName = ? WHERE id = ?',\n (name, client_id))\n return client_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add participant given name '{name}' and id '{client_id}': \"\n + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_participants(self):\n\n def execute_sql(cursor):\n cursor.execute('SELECT id, participantName FROM Participants')\n rows = cursor.fetchall()\n results = [{'id': id_, 'name': name} for id_, name in rows]\n return results\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to get participant list: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def get_audio_ids_from_combination_id(self, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('SELECT BackingTracks.audioId\\n' +\n 'FROM Combinations\\n' + \"\"\"LEFT JOIN BackingTracks\n\"\"\" +\n 'ON Combinations.backingTrackId = BackingTracks.id\\n' +\n 'WHERE combinations.id = ?', (combination_id,))\n rows = cursor.fetchall()\n if len(rows) == 0:\n backing_audio_ids = []\n elif len(rows) == 1:\n row = rows[0]\n audio_id = row[0]\n backing_audio_ids = [audio_id]\n else:\n raise Exception(f'More than one backing track matched ' +\n f'combination id {combination_id}; this ' +\n f\"shouldn't be possible\")\n cursor.execute('SELECT audioId\\n' + 'FROM CombinationsDetail\\n' +\n 'LEFT JOIN Takes\\n' +\n \"\"\"ON CombinationsDetail.id = Takes.combinationId\n\"\"\" +\n 'WHERE CombinationsDetail.combinationId = ?', (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n if len(backing_audio_ids) == 0:\n raise Exception(\n f'We have neither a backing track nor takes ' +\n f'for the given combination id ({combination_id});' +\n f\"this shouldn't be possible\")\n else:\n takes_audio_ids = [row[0] for row in rows]\n backing_audio_ids += takes_audio_ids\n return backing_audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n 'Failed to get backing audio ids from combination id: ' +\n str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def add_take(self, take_name, combination_id):\n\n def execute_sql(cursor):\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute(\n 'INSERT INTO Takes (audioId, combinationId, takeName, complete) '\n + 'VALUES (?, ?, ?, 0)', (audio_id, combination_id, take_name)\n )\n take_id = cursor.lastrowid\n return take_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add take: ' + str(error))\n return error\n d.addErrback(on_error)\n return d\n\n def add_recording_audio_ids(self, take_id, participants):\n\n def execute_sql(cursor):\n audio_ids = {}\n for participant_id in participants:\n cursor.execute('INSERT INTO AudioIdentifiers DEFAULT VALUES')\n audio_id = cursor.lastrowid\n cursor.execute('INSERT INTO Recordings ' +\n 
'(audioId, participantId, takeId, complete) ' +\n 'VALUES (?, ?, ?, 0)', (audio_id, participant_id, take_id))\n audio_ids[participant_id] = audio_id\n return audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn('Failed to add recordings for participants: ' + str(error)\n )\n return error\n d.addErrback(on_error)\n return d\n",
"step-5": "import pkg_resources\nfrom twisted.enterprise import adbapi\nfrom twisted.internet import defer\n\n# Start a logger with a namespace for a particular subsystem of our application.\nfrom twisted.logger import Logger\nlog = Logger(\"database\")\n\nclass Database:\n def __init__(self, context, db_filename=\"database.sqlite\"):\n # Get full path and filename for database\n session_files = context[\"session_files\"]\n db_filename = session_files.session_dir / db_filename\n \n # Note if database already exists\n database_exists = db_filename.is_file()\n\n # Callback for every connection that is established to the\n # database\n def setup_connection(connection):\n # Turn on foreign key constraints\n cursor = connection.cursor()\n cursor.execute(\"PRAGMA foreign_keys = ON;\")\n\n # # Turn on column names in rows\n # import sqlite3\n # connection.row_factory = sqlite3.Row\n \n # Open a connection to the database. SQLite will create the file if\n # it doesn't already exist.\n dbpool = adbapi.ConnectionPool(\n \"sqlite3\",\n db_filename,\n cp_openfun=setup_connection,\n check_same_thread=False # See https://twistedmatrix.com/trac/ticket/3629\n )\n\n # If the database did not exist, initialise the database\n if not database_exists:\n print(\"Database requires initialisation\")\n self._db_ready = dbpool.runInteraction(self._initialise_database)\n def on_success(data):\n log.info(\"Database successfully initialised\")\n return dbpool\n def on_error(data):\n log.error(f\"Failed to initialise the server's database: {data}\")\n reactor = context[\"reactor\"]\n reactor.stop()\n\n self._db_ready.addCallback(on_success)\n self._db_ready.addErrback(on_error)\n else:\n # Database exists already\n self._db_ready = defer.Deferred()\n self._db_ready.callback(dbpool)\n\n # Check that database is the correct version\n expected_version = 4\n def check_version(cursor):\n cursor.execute(\"SELECT version FROM Version\")\n row = cursor.fetchone()\n if row is None:\n raise Exception(\"No version found in Version table of database\")\n if row[0] == expected_version:\n log.info(f\"Server database version {expected_version}\")\n return dbpool\n else:\n reactor = context[\"reactor\"]\n reactor.stop()\n raise Exception(f\"Database version ({row[0]}) did not match expected version ({expected_version}). 
Terminating.\")\n\n def run_check_version(dbpool):\n return dbpool.runInteraction(check_version)\n d = self.get_dbpool()\n d.addCallback(run_check_version)\n\n def on_error(error):\n log.error(\"Failed to verify the database: \"+str(error))\n reactor = context[\"reactor\"]\n reactor.stop()\n d.addErrback(on_error)\n\n \n # Initialise the database structure from instructions in file\n def _initialise_database(self, cursor):\n log.info(\"Initialising database\")\n initialisation_commands_filename = \\\n pkg_resources.resource_filename(\n \"singtserver\",\n \"database.sql\"\n )\n f = open(initialisation_commands_filename, \"r\")\n initialisation_commands = f.read()\n return cursor.executescript(initialisation_commands)\n\n\n def get_dbpool(self):\n d = defer.Deferred()\n def db_ready(db):\n d.callback(db)\n return db\n self._db_ready.addCallback(db_ready)\n \n return d\n\n \n def get_combination(self, track_id=None, take_ids=[]):\n # Sanity check arguments\n if (track_id is None\n and len(take_ids) == 0):\n raise Exception(\n \"Getting a combination from the database requires \"+\n \"at least a Track ID or at least one Take ID\"\n )\n\n # Get combination from database.\n # See answers to https://stackoverflow.com/questions/63356820/sql-select-from-many-to-one\n # and https://stackoverflow.com/a/5766293/562930\n def get_combo(cursor):\n if track_id is None:\n assert len(take_ids) > 0\n sql = (\n \"SELECT id\\n\"+\n \"FROM Combinations\\n\"+\n \"WHERE backingTrackId IS NULL\\n\"+\n \" AND id IN\\n\"+\n \" (SELECT combinationId\\n\"+\n \" FROM CombinationsDetail\\n\"+\n \" GROUP BY combinationId\\n\" +\n \" HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0) = ?\".format(\n seq=\",\".join([\"?\"]*len(take_ids))\n )\n )\n cursor.execute(\n sql,\n (*take_ids, len(take_ids))\n )\n \n elif len(take_ids) == 0:\n sql = (\n \"SELECT id\\n\"+\n \"FROM Combinations\\n\"+\n \"WHERE backingTrackId = ?\\n\"+\n \" AND NOT EXISTS\\n\"+\n \" (SELECT * \\n\"+\n \" FROM CombinationsDetail\\n\"+\n \" WHERE combinationId = Combinations.id)\"\n )\n cursor.execute(\n sql,\n (track_id, )\n )\n \n else:\n sql = (\"SELECT id\\n\"+\n \"FROM Combinations\\n\"+\n \"WHERE backingTrackId = ?\\n\"+\n \" AND id IN\\n\"+\n \" (SELECT combinationId\\n\"+\n \" FROM CombinationsDetail\\n\"+\n \" GROUP BY combinationId\\n\" +\n \" HAVING SUM(CASE WHEN takeId IN ({seq}) THEN 1 ELSE 0 END) = ?)\").format(\n seq=\",\".join(['?']*len(take_ids))\n )\n cursor.execute(\n sql,\n (track_id, *take_ids, len(take_ids))\n )\n\n # Although there should be at most only one combo id that\n # matches the track and takes specification, even if there\n # are more than one, we'll just return the first (or None\n # if there aren't any).\n row = cursor.fetchone()\n if row is None:\n return None\n combo_id = row[0]\n return combo_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(get_combo)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_success(data):\n log.info(\"Successfully added combination to database; combination id: \"+str(data))\n return data\n d.addCallback(on_success)\n \n def on_error(error):\n log.error(\"Failed to add combination to the database: \"+str(error))\n raise Exception(\"Failed to add combination to the database\")\n d.addErrback(on_error)\n\n return d\n\n\n def add_combination(self, track_id=None, take_ids=[]):\n \"\"\"Adds combination into database.\n\n Returns combo_id.\n \"\"\"\n log.info(f\"Adding combination to database with track id = {track_id} and take_ids = {take_ids}\")\n # Sanity check 
arguments\n if (track_id is None\n and len(take_ids) == 0):\n raise Exception(\n \"Adding a combination to the database requires \"+\n \"at least a Track ID or at least one Take ID\"\n )\n\n # Create combination in database\n def add_combo(cursor):\n # Create audio id\n cursor.execute(\"INSERT INTO AudioIdentifiers DEFAULT VALUES\")\n audio_id = cursor.lastrowid\n \n print(\"track_id:\", track_id)\n cursor.execute(\n \"INSERT INTO Combinations (audioId, backingTrackId) VALUES (?, ?)\",\n (audio_id, track_id)\n )\n combo_id = cursor.lastrowid\n\n for take_id in take_ids:\n cursor.execute(\n \"INSERT INTO CombinationsDetail (combinationId, takeId) \"+\n \"VALUES (?,?)\",\n (combo_id, take_id)\n )\n \n return combo_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(add_combo)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_success(data):\n log.info(\"Successfully added combination to database; combination id: \"+str(data))\n return data\n def on_error(error):\n log.error(\"Failed to add combination to the database: \"+str(error))\n raise Exception(\"Failed to add combination to the database\")\n\n d.addCallback(on_success)\n d.addErrback(on_error)\n\n return d\n \n\n def get_track_audio_id(self, track_id):\n \"\"\"Returns track's audio id or None.\"\"\"\n def execute_sql(cursor):\n cursor.execute(\"SELECT audioId FROM BackingTracks WHERE id = ?\",\n (track_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n \n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\"Failed to get audio ID for track id ({track_id}): \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n \n\n def get_take_audio_id(self, take_id):\n \"\"\"Returns take's audio id or None.\"\"\"\n def execute_sql(cursor):\n cursor.execute(\"SELECT audioId FROM Takes WHERE id = ?\",\n (take_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n \n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\"Failed to get audio ID for take id ({take_id}): \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n\n \n def assign_participant(self, client_id, name):\n \"\"\"Assigns the name to the client id.\"\"\"\n\n def execute_sql(cursor):\n # First, check if the id already exists\n cursor.execute(\n \"SELECT participantName FROM Participants WHERE id = ?\",\n (client_id,)\n )\n row = cursor.fetchone()\n if row is None:\n # We don't currently have this ID, insert it\n cursor.execute(\n \"INSERT INTO Participants (id, participantName) \"+\n \"VALUES (?, ?)\",\n (client_id, name)\n )\n return client_id\n\n # Otherwise, a row does already exist\n current_name = row[0]\n if name == current_name:\n # We have nothing to do, the database is already\n # correct\n return client_id\n\n # Otherwise, we need to update the database\n cursor.execute(\n \"UPDATE Participants SET participantName = ? 
WHERE id = ?\",\n (name, client_id)\n )\n return client_id\n \n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add participant given name '{name}' and id '{client_id}': \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n\n \n def get_participants(self):\n def execute_sql(cursor):\n cursor.execute(\"SELECT id, participantName FROM Participants\")\n rows = cursor.fetchall()\n results = [{\"id\":id_, \"name\":name} for id_, name in rows]\n return results\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to get participant list: \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n \n def get_audio_ids_from_combination_id(self, combination_id):\n def execute_sql(cursor):\n # Get Track ID. There should be either zero or one, but\n # not more.\n cursor.execute(\n \"SELECT BackingTracks.audioId\\n\"+\n \"FROM Combinations\\n\"+\n \"LEFT JOIN BackingTracks\\n\"+\n \"ON Combinations.backingTrackId = BackingTracks.id\\n\"+\n \"WHERE combinations.id = ?\",\n (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n # We don't have a backing track; that's fine, move on\n # to the takes.\n backing_audio_ids = []\n elif len(rows) == 1:\n # We have one backing track\n row = rows[0]\n audio_id = row[0]\n backing_audio_ids = [audio_id]\n else:\n # We have more than one backing track; error.\n raise Exception(\n f\"More than one backing track matched \"+\n f\"combination id {combination_id}; this \"+\n f\"shouldn't be possible\"\n )\n\n # Get the Take IDs. There may be many of these. 
But if\n # there wasn't a backing track id, then there needs to be\n # at least one Take ID.\n cursor.execute(\n \"SELECT audioId\\n\"+\n \"FROM CombinationsDetail\\n\"+\n \"LEFT JOIN Takes\\n\"+\n \"ON CombinationsDetail.id = Takes.combinationId\\n\"+\n \"WHERE CombinationsDetail.combinationId = ?\",\n (combination_id,)\n )\n rows = cursor.fetchall()\n if len(rows) == 0:\n # This is only as issue if we don't have any backing\n # tracks either\n if len(backing_audio_ids) == 0:\n raise Exception(\n f\"We have neither a backing track nor takes \"+\n f\"for the given combination id ({combination_id});\"+\n f\"this shouldn't be possible\"\n )\n else:\n # Add the Take IDs to the list \n takes_audio_ids = [row[0] for row in rows]\n backing_audio_ids += takes_audio_ids\n \n return backing_audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to get backing audio ids from combination id: \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n \n def add_take(self, take_name, combination_id):\n def execute_sql(cursor):\n # Create audio id\n cursor.execute(\"INSERT INTO AudioIdentifiers DEFAULT VALUES\")\n audio_id = cursor.lastrowid\n\n # Create take\n cursor.execute(\n \"INSERT INTO Takes (audioId, combinationId, takeName, complete) \"+\n \"VALUES (?, ?, ?, 0)\",\n (audio_id, combination_id, take_name)\n )\n take_id = cursor.lastrowid\n\n return take_id\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add take: \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n \n def add_recording_audio_ids(self, take_id, participants):\n def execute_sql(cursor):\n audio_ids = {}\n for participant_id in participants:\n # Create audio id\n cursor.execute(\"INSERT INTO AudioIdentifiers DEFAULT VALUES\")\n audio_id = cursor.lastrowid\n\n # Add entry into Recordings\n cursor.execute(\n \"INSERT INTO Recordings \"+\n \"(audioId, participantId, takeId, complete) \"+\n \"VALUES (?, ?, ?, 0)\",\n (audio_id, participant_id, take_id)\n )\n \n audio_ids[participant_id] = audio_id\n return audio_ids\n\n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\n \"Failed to add recordings for participants: \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d\n\n",
"step-ids": [
8,
9,
12,
15,
16
]
}
|
[
8,
9,
12,
15,
16
] |
"""
util - other functions
"""
import torch
import numpy as np
from common_labelme import Config
from torch.autograd import Variable
# Identity and all-ones matrices used to split the batch pairing matrix into
# its diagonal (matched pairs) and off-diagonal (mismatched pairs) blocks.
I = torch.FloatTensor(np.eye(Config.batch_size))
E = torch.FloatTensor(np.ones((Config.batch_size, Config.batch_size)))
normalize_1 = Config.batch_size                                           # number of matched pairs
normalize_2 = Config.batch_size * Config.batch_size - Config.batch_size  # number of mismatched pairs
def mig_loss_function(output1, output2, p):
    new_output = output1 / p
    m = new_output @ output2.transpose(1, 0)
    noise = torch.rand(1)*0.0001    # tiny jitter so the log never sees an exact zero on the diagonal
    m1 = torch.log(m*I + I*noise + E - I)
    m2 = m*(E-I)
    return -(sum(sum(m1)) + Config.batch_size)/normalize_1 + sum(sum(m2)) / normalize_2
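# Usage sketch (illustrative only, not part of the original pipeline): both
# arguments are assumed to be softmax probabilities over the same batch and
# `p` a class prior such as the one returned by initial_priori() below, e.g.
#
#     p = torch.ones(Config.num_classes) / Config.num_classes   # uniform prior
#     loss = mig_loss_function(left_probs, right_probs, p)      # left_probs/right_probs are placeholder names
#     loss.backward()
#
# The diagonal term (m1) rewards agreement of the two heads on matched samples;
# the off-diagonal term (m2) penalises coincidental agreement across the batch.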
def tvd_loss_function(output1, output2, p):
new_output = output1 / p
m = (new_output @ output2.transpose(1,0))
noise = torch.rand(1)*0.0001
m1 = torch.log(m*I + I * noise + E - I)
m2 = torch.log(m*(E-I) + I )
return -(sum(sum(torch.sign(m1))))/normalize_1 + sum(sum(torch.sign(m2))) / normalize_2
def pearson_loss_function(output1, output2, p):
new_output = output1 / p
m = (new_output @ output2.transpose(1,0))
m1 = m*I
m2 = m*(E-I)
m2 = m2*m2
return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum(sum(m2)) - normalize_2) / normalize_2
def reverse_kl_loss_function(output1, output2, p):
new_output = output1 / p
m = (new_output @ output2.transpose(1,0))
m1 = m*I
m1 = -I/(m1.float() + E - I)
m2 = torch.log(m*(E-I) + I)
return -(sum(sum(m1)))/normalize_1 + (-sum(sum(m2)) - normalize_2) / normalize_2
def sh_loss_function(output1, output2, p):
new_output = output1 / p
m = (new_output @ output2.transpose(1,0))
m1 = m*I
m1 = torch.sqrt(I/(m1.float() + E - I))
m2 = torch.sqrt(m*(E-I))
return -(-sum(sum(m1)) + Config.batch_size)/normalize_1 + sum(sum(m2)) / normalize_2
def entropy_loss(outputs):
num = outputs.size()[0]
temp = -outputs * torch.log(outputs+0.0001)
loss = torch.sum(temp)
loss /= num
return loss
def M_step(expert_label,mu):
#---------------------------------------------------------------#
# #
# expert_label size : batch_size * expert_num #
# mu : batch_size * num_classes #
# expert_parameters = expert_num * num_classes * num_classes #
# #
#---------------------------------------------------------------#
if not Config.missing:
normalize = torch.sum(mu, 0).float()
expert_label = expert_label.long()
expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))
for i in range(mu.size()[0]):
for R in range(Config.expert_num):
expert_parameters[R, :, expert_label[i, R]] += mu[i].float()
expert_parameters = expert_parameters / normalize.unsqueeze(1)
else:
normalize = torch.zeros(Config.expert_num,Config.num_classes)
expert_label = expert_label.long()
expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))
for i in range(mu.size()[0]):
for R in range(Config.expert_num):
if expert_label[i,R] < 0:
continue
expert_parameters[R, :, expert_label[i, R]] += mu[i].float()
normalize[R] += mu[i].float()
normalize = normalize + 1 * (normalize == 0).float()
for R in range(Config.expert_num):
expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)
expert_parameters = expert_parameters.cuda()
return expert_parameters
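# Shape sketch (illustrative, matching the comment block above): with mu the
# posterior responsibilities from the E-step and expert_label the crowd labels
# (negative entries meaning "missing" when Config.missing is set),
#
#     mu              : (batch_size, num_classes)   soft class assignments
#     expert_label    : (batch_size, expert_num)    integer labels, <0 = missing
#     returned tensor : (expert_num, num_classes, num_classes) row-normalised
#                       confusion matrices, one per expert, moved to CUDA.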
def M_step_p_mbem(t):
p = torch.zeros(Config.num_classes)
t = t.long()
for i in range(t.size(0)):
p[t[i]] += 1
p /= t.size()[0]
return p
def M_step_mbem(expert_label,t):
#---------------------------------------------------------------#
# #
# expert_label size : batch_size * expert_num #
# t : batch_size #
# expert_parameters = expert_num * num_classes * num_classes #
# #
#---------------------------------------------------------------#
normalize = torch.zeros(Config.expert_num, Config.num_classes)
expert_label = expert_label.long()
t = t.long()
expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))
for i in range(t.size()[0]):
for R in range(Config.expert_num):
if expert_label[i, R] < 0:
continue
expert_parameters[R, t[i], expert_label[i, R]] += 1
normalize[R,t[i]] += 1
normalize = normalize + 1 * (normalize == 0).float()
for R in range(Config.expert_num):
expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)
expert_parameters = expert_parameters.cuda()
return expert_parameters
def print_recons_result(right_model, confusion_matrix):
confusion_loss = 0
for i in range(1,len(list(right_model.parameters()))):
para = list(right_model.parameters())[i].detach().cpu()
#print("Expert %d" %i)
local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)
#print(local_confusion_matrix)
residual_matrix = local_confusion_matrix - confusion_matrix[i-1, :, :]
residual = torch.sum(abs(residual_matrix))
confusion_loss += residual
print("Total variation:", confusion_loss.item())
def initial_priori(train_loader):
p = torch.zeros((Config.num_classes))
total = 0
for batch_idx, (left_data, right_data, label) in enumerate(train_loader):
linear_sum = torch.sum(right_data, dim=1)
_, majority = torch.max(linear_sum, 1)
majority = Variable(majority).long()
total += label.size()[0]
for i in range(Config.num_classes):
p[i] += torch.sum(majority == i).float()
p = p/float(total)
return p
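# Illustrative call (assumes the loader yields (image, expert_labels, label)
# batches, as the loop above does): the prior is estimated once from majority
# votes before training and can later be refreshed with update_priori() below.
#
#     p = initial_priori(train_loader)   # shape: (num_classes,)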
def update_priori(model, train_loader):
    # Re-estimate the class prior; alternative update rules are sketched in
    # the commented-out blocks further down.
    p = torch.zeros(Config.num_classes)
    # updating the prior from the model's posterior predictions
total = 0
for batch_idx, (left_data, right_data, label) in enumerate(train_loader):
ep = Variable(right_data).float().cuda()
images = Variable(left_data).float().cuda()
outputs = model(images)
_, predicts = torch.max(outputs.data, 1)
total += ep.size()[0]
predicts = predicts.detach().cpu()
for i in range(Config.num_classes):
p[i] += torch.sum(predicts == i).float()
p = p/float(total)
'''
# updating priori by loss
pri = priori
pri = Variable(pri, requires_grad=True)
loss = mig_loss_function(left_outputs.detach(),right_outputs.detach(),p)
loss.backward()
grad = pri.grad
pri = pri.detach() - Config.alpha * grad
pri = torch.exp(pri)
pri = pri / torch.sum(pri)
'''
'''
# true priori
p[0] = 0.5
p[1] = 0.5
'''
return p
|
normal
|
{
"blob_id": "be9179b33991ba743e6e6b7d5dd4dc85ffc09fc3",
"index": 6331,
"step-1": "<mask token>\n\n\ndef mig_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = m * (E - I)\n return -(sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef tvd_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(torch.sign(m1))) / normalize_1 + sum(sum(torch.sign(m2))\n ) / normalize_2\n\n\ndef pearson_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m2 = m * (E - I)\n m2 = m2 * m2\n return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum\n (sum(m2)) - normalize_2) / normalize_2\n\n\ndef reverse_kl_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = -I / (m1.float() + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(m1)) / normalize_1 + (-sum(sum(m2)) - normalize_2\n ) / normalize_2\n\n\ndef sh_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = torch.sqrt(I / (m1.float() + E - I))\n m2 = torch.sqrt(m * (E - I))\n return -(-sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef entropy_loss(outputs):\n num = outputs.size()[0]\n temp = -outputs * torch.log(outputs + 0.0001)\n loss = torch.sum(temp)\n loss /= num\n return loss\n\n\ndef M_step(expert_label, mu):\n if not Config.missing:\n normalize = torch.sum(mu, 0).float()\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n expert_parameters = expert_parameters / normalize.unsqueeze(1)\n else:\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n normalize[R] += mu[i].float()\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R\n ].unsqueeze(1)\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef M_step_p_mbem(t):\n p = torch.zeros(Config.num_classes)\n t = t.long()\n for i in range(t.size(0)):\n p[t[i]] += 1\n p /= t.size()[0]\n return p\n\n\ndef M_step_mbem(expert_label, t):\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n t = t.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes,\n Config.num_classes))\n for i in range(t.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, t[i], expert_label[i, R]] += 1\n normalize[R, t[i]] += 1\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n expert_parameters 
= expert_parameters.cuda()\n return expert_parameters\n\n\ndef print_recons_result(right_model, confusion_matrix):\n confusion_loss = 0\n for i in range(1, len(list(right_model.parameters()))):\n para = list(right_model.parameters())[i].detach().cpu()\n local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)\n residual_matrix = local_confusion_matrix - confusion_matrix[i - 1, :, :\n ]\n residual = torch.sum(abs(residual_matrix))\n confusion_loss += residual\n print('Total variation:', confusion_loss.item())\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef mig_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = m * (E - I)\n return -(sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef tvd_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(torch.sign(m1))) / normalize_1 + sum(sum(torch.sign(m2))\n ) / normalize_2\n\n\ndef pearson_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m2 = m * (E - I)\n m2 = m2 * m2\n return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum\n (sum(m2)) - normalize_2) / normalize_2\n\n\ndef reverse_kl_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = -I / (m1.float() + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(m1)) / normalize_1 + (-sum(sum(m2)) - normalize_2\n ) / normalize_2\n\n\ndef sh_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = torch.sqrt(I / (m1.float() + E - I))\n m2 = torch.sqrt(m * (E - I))\n return -(-sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef entropy_loss(outputs):\n num = outputs.size()[0]\n temp = -outputs * torch.log(outputs + 0.0001)\n loss = torch.sum(temp)\n loss /= num\n return loss\n\n\ndef M_step(expert_label, mu):\n if not Config.missing:\n normalize = torch.sum(mu, 0).float()\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n expert_parameters = expert_parameters / normalize.unsqueeze(1)\n else:\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n normalize[R] += mu[i].float()\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R\n ].unsqueeze(1)\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef M_step_p_mbem(t):\n p = torch.zeros(Config.num_classes)\n t = t.long()\n for i in range(t.size(0)):\n p[t[i]] += 1\n p /= t.size()[0]\n return p\n\n\ndef M_step_mbem(expert_label, t):\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n t = t.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes,\n Config.num_classes))\n for i in range(t.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, t[i], expert_label[i, R]] += 1\n normalize[R, t[i]] += 1\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n expert_parameters 
= expert_parameters.cuda()\n return expert_parameters\n\n\ndef print_recons_result(right_model, confusion_matrix):\n confusion_loss = 0\n for i in range(1, len(list(right_model.parameters()))):\n para = list(right_model.parameters())[i].detach().cpu()\n local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)\n residual_matrix = local_confusion_matrix - confusion_matrix[i - 1, :, :\n ]\n residual = torch.sum(abs(residual_matrix))\n confusion_loss += residual\n print('Total variation:', confusion_loss.item())\n\n\ndef initial_priori(train_loader):\n p = torch.zeros(Config.num_classes)\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n linear_sum = torch.sum(right_data, dim=1)\n _, majority = torch.max(linear_sum, 1)\n majority = Variable(majority).long()\n total += label.size()[0]\n for i in range(Config.num_classes):\n p[i] += torch.sum(majority == i).float()\n p = p / float(total)\n return p\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef mig_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = m * (E - I)\n return -(sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef tvd_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(torch.sign(m1))) / normalize_1 + sum(sum(torch.sign(m2))\n ) / normalize_2\n\n\ndef pearson_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m2 = m * (E - I)\n m2 = m2 * m2\n return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum\n (sum(m2)) - normalize_2) / normalize_2\n\n\ndef reverse_kl_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = -I / (m1.float() + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(m1)) / normalize_1 + (-sum(sum(m2)) - normalize_2\n ) / normalize_2\n\n\ndef sh_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = torch.sqrt(I / (m1.float() + E - I))\n m2 = torch.sqrt(m * (E - I))\n return -(-sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef entropy_loss(outputs):\n num = outputs.size()[0]\n temp = -outputs * torch.log(outputs + 0.0001)\n loss = torch.sum(temp)\n loss /= num\n return loss\n\n\ndef M_step(expert_label, mu):\n if not Config.missing:\n normalize = torch.sum(mu, 0).float()\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n expert_parameters = expert_parameters / normalize.unsqueeze(1)\n else:\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n normalize[R] += mu[i].float()\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R\n ].unsqueeze(1)\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef M_step_p_mbem(t):\n p = torch.zeros(Config.num_classes)\n t = t.long()\n for i in range(t.size(0)):\n p[t[i]] += 1\n p /= t.size()[0]\n return p\n\n\ndef M_step_mbem(expert_label, t):\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n t = t.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes,\n Config.num_classes))\n for i in range(t.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, t[i], expert_label[i, R]] += 1\n normalize[R, t[i]] += 1\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n expert_parameters 
= expert_parameters.cuda()\n return expert_parameters\n\n\ndef print_recons_result(right_model, confusion_matrix):\n confusion_loss = 0\n for i in range(1, len(list(right_model.parameters()))):\n para = list(right_model.parameters())[i].detach().cpu()\n local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)\n residual_matrix = local_confusion_matrix - confusion_matrix[i - 1, :, :\n ]\n residual = torch.sum(abs(residual_matrix))\n confusion_loss += residual\n print('Total variation:', confusion_loss.item())\n\n\ndef initial_priori(train_loader):\n p = torch.zeros(Config.num_classes)\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n linear_sum = torch.sum(right_data, dim=1)\n _, majority = torch.max(linear_sum, 1)\n majority = Variable(majority).long()\n total += label.size()[0]\n for i in range(Config.num_classes):\n p[i] += torch.sum(majority == i).float()\n p = p / float(total)\n return p\n\n\ndef update_priori(model, train_loader):\n p = torch.zeros(Config.num_classes)\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n ep = Variable(right_data).float().cuda()\n images = Variable(left_data).float().cuda()\n outputs = model(images)\n _, predicts = torch.max(outputs.data, 1)\n total += ep.size()[0]\n predicts = predicts.detach().cpu()\n for i in range(Config.num_classes):\n p[i] += torch.sum(predicts == i).float()\n p = p / float(total)\n \"\"\"\n # updating priori by loss\n pri = priori\n pri = Variable(pri, requires_grad=True)\n loss = mig_loss_function(left_outputs.detach(),right_outputs.detach(),p)\n loss.backward()\n grad = pri.grad\n pri = pri.detach() - Config.alpha * grad\n pri = torch.exp(pri)\n pri = pri / torch.sum(pri)\n \n \"\"\"\n \"\"\"\n # true priori\n p[0] = 0.5\n p[1] = 0.5\n \"\"\"\n return p\n",
"step-4": "<mask token>\nimport torch\nimport numpy as np\nfrom common_labelme import Config\nfrom torch.autograd import Variable\nI = torch.FloatTensor(np.eye(Config.batch_size))\nE = torch.FloatTensor(np.ones((Config.batch_size, Config.batch_size)))\nnormalize_1 = Config.batch_size\nnormalize_2 = Config.batch_size * Config.batch_size - Config.batch_size\n\n\ndef mig_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = m * (E - I)\n return -(sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef tvd_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n noise = torch.rand(1) * 0.0001\n m1 = torch.log(m * I + I * noise + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(torch.sign(m1))) / normalize_1 + sum(sum(torch.sign(m2))\n ) / normalize_2\n\n\ndef pearson_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m2 = m * (E - I)\n m2 = m2 * m2\n return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum\n (sum(m2)) - normalize_2) / normalize_2\n\n\ndef reverse_kl_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = -I / (m1.float() + E - I)\n m2 = torch.log(m * (E - I) + I)\n return -sum(sum(m1)) / normalize_1 + (-sum(sum(m2)) - normalize_2\n ) / normalize_2\n\n\ndef sh_loss_function(output1, output2, p):\n new_output = output1 / p\n m = new_output @ output2.transpose(1, 0)\n m1 = m * I\n m1 = torch.sqrt(I / (m1.float() + E - I))\n m2 = torch.sqrt(m * (E - I))\n return -(-sum(sum(m1)) + Config.batch_size) / normalize_1 + sum(sum(m2)\n ) / normalize_2\n\n\ndef entropy_loss(outputs):\n num = outputs.size()[0]\n temp = -outputs * torch.log(outputs + 0.0001)\n loss = torch.sum(temp)\n loss /= num\n return loss\n\n\ndef M_step(expert_label, mu):\n if not Config.missing:\n normalize = torch.sum(mu, 0).float()\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n expert_parameters = expert_parameters / normalize.unsqueeze(1)\n else:\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.\n num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n normalize[R] += mu[i].float()\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R\n ].unsqueeze(1)\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef M_step_p_mbem(t):\n p = torch.zeros(Config.num_classes)\n t = t.long()\n for i in range(t.size(0)):\n p[t[i]] += 1\n p /= t.size()[0]\n return p\n\n\ndef M_step_mbem(expert_label, t):\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n t = t.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes,\n Config.num_classes))\n for i in range(t.size()[0]):\n for R 
in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, t[i], expert_label[i, R]] += 1\n normalize[R, t[i]] += 1\n normalize = normalize + 1 * (normalize == 0).float()\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef print_recons_result(right_model, confusion_matrix):\n confusion_loss = 0\n for i in range(1, len(list(right_model.parameters()))):\n para = list(right_model.parameters())[i].detach().cpu()\n local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)\n residual_matrix = local_confusion_matrix - confusion_matrix[i - 1, :, :\n ]\n residual = torch.sum(abs(residual_matrix))\n confusion_loss += residual\n print('Total variation:', confusion_loss.item())\n\n\ndef initial_priori(train_loader):\n p = torch.zeros(Config.num_classes)\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n linear_sum = torch.sum(right_data, dim=1)\n _, majority = torch.max(linear_sum, 1)\n majority = Variable(majority).long()\n total += label.size()[0]\n for i in range(Config.num_classes):\n p[i] += torch.sum(majority == i).float()\n p = p / float(total)\n return p\n\n\ndef update_priori(model, train_loader):\n p = torch.zeros(Config.num_classes)\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n ep = Variable(right_data).float().cuda()\n images = Variable(left_data).float().cuda()\n outputs = model(images)\n _, predicts = torch.max(outputs.data, 1)\n total += ep.size()[0]\n predicts = predicts.detach().cpu()\n for i in range(Config.num_classes):\n p[i] += torch.sum(predicts == i).float()\n p = p / float(total)\n \"\"\"\n # updating priori by loss\n pri = priori\n pri = Variable(pri, requires_grad=True)\n loss = mig_loss_function(left_outputs.detach(),right_outputs.detach(),p)\n loss.backward()\n grad = pri.grad\n pri = pri.detach() - Config.alpha * grad\n pri = torch.exp(pri)\n pri = pri / torch.sum(pri)\n \n \"\"\"\n \"\"\"\n # true priori\n p[0] = 0.5\n p[1] = 0.5\n \"\"\"\n return p\n",
"step-5": "\"\"\"\nutil - other functions\n\"\"\"\nimport torch\nimport numpy as np\nfrom common_labelme import Config\nfrom torch.autograd import Variable\n\nI = torch.FloatTensor(np.eye(Config.batch_size),)\nE = torch.FloatTensor(np.ones((Config.batch_size, Config.batch_size)))\nnormalize_1 = Config.batch_size\nnormalize_2 = Config.batch_size * Config.batch_size - Config.batch_size\n\ndef mig_loss_function(output1, output2, p):\n new_output = output1 / p\n m = (new_output @ output2.transpose(1,0))\n noise = torch.rand(1)*0.0001\n m1 = torch.log(m*I+ I*noise + E - I)\n m2 = m*(E-I)\n return -(sum(sum(m1)) + Config.batch_size)/normalize_1 + sum(sum(m2)) / normalize_2\n\ndef tvd_loss_function(output1, output2, p):\n new_output = output1 / p\n m = (new_output @ output2.transpose(1,0))\n noise = torch.rand(1)*0.0001\n m1 = torch.log(m*I + I * noise + E - I)\n m2 = torch.log(m*(E-I) + I )\n\n return -(sum(sum(torch.sign(m1))))/normalize_1 + sum(sum(torch.sign(m2))) / normalize_2\n\ndef pearson_loss_function(output1, output2, p):\n new_output = output1 / p\n m = (new_output @ output2.transpose(1,0))\n\n m1 = m*I\n m2 = m*(E-I)\n m2 = m2*m2\n return -(2 * sum(sum(m1)) - 2 * Config.batch_size) / normalize_1 + (sum(sum(m2)) - normalize_2) / normalize_2\n\ndef reverse_kl_loss_function(output1, output2, p):\n new_output = output1 / p\n m = (new_output @ output2.transpose(1,0))\n m1 = m*I\n m1 = -I/(m1.float() + E - I)\n m2 = torch.log(m*(E-I) + I)\n return -(sum(sum(m1)))/normalize_1 + (-sum(sum(m2)) - normalize_2) / normalize_2\n\ndef sh_loss_function(output1, output2, p):\n new_output = output1 / p\n m = (new_output @ output2.transpose(1,0))\n m1 = m*I\n m1 = torch.sqrt(I/(m1.float() + E - I))\n m2 = torch.sqrt(m*(E-I))\n return -(-sum(sum(m1)) + Config.batch_size)/normalize_1 + sum(sum(m2)) / normalize_2\n\ndef entropy_loss(outputs):\n num = outputs.size()[0]\n temp = -outputs * torch.log(outputs+0.0001)\n loss = torch.sum(temp)\n loss /= num\n return loss\n\ndef M_step(expert_label,mu):\n\n #---------------------------------------------------------------#\n # #\n # expert_label size : batch_size * expert_num #\n # mu : batch_size * num_classes #\n # expert_parameters = expert_num * num_classes * num_classes #\n # #\n #---------------------------------------------------------------#\n\n if not Config.missing:\n normalize = torch.sum(mu, 0).float()\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n\n expert_parameters = expert_parameters / normalize.unsqueeze(1)\n else:\n normalize = torch.zeros(Config.expert_num,Config.num_classes)\n expert_label = expert_label.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))\n for i in range(mu.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i,R] < 0:\n continue\n expert_parameters[R, :, expert_label[i, R]] += mu[i].float()\n normalize[R] += mu[i].float()\n\n normalize = normalize + 1 * (normalize == 0).float()\n\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\ndef M_step_p_mbem(t):\n\n p = torch.zeros(Config.num_classes)\n t = t.long()\n for i in range(t.size(0)):\n p[t[i]] += 1\n p /= t.size()[0]\n return p\n\n\ndef 
M_step_mbem(expert_label,t):\n\n #---------------------------------------------------------------#\n # #\n # expert_label size : batch_size * expert_num #\n # t : batch_size #\n # expert_parameters = expert_num * num_classes * num_classes #\n # #\n #---------------------------------------------------------------#\n\n normalize = torch.zeros(Config.expert_num, Config.num_classes)\n expert_label = expert_label.long()\n t = t.long()\n expert_parameters = torch.zeros((Config.expert_num, Config.num_classes, Config.num_classes))\n\n\n for i in range(t.size()[0]):\n for R in range(Config.expert_num):\n if expert_label[i, R] < 0:\n continue\n expert_parameters[R, t[i], expert_label[i, R]] += 1\n normalize[R,t[i]] += 1\n normalize = normalize + 1 * (normalize == 0).float()\n\n for R in range(Config.expert_num):\n expert_parameters[R] = expert_parameters[R] / normalize[R].unsqueeze(1)\n\n expert_parameters = expert_parameters.cuda()\n return expert_parameters\n\n\ndef print_recons_result(right_model, confusion_matrix):\n\n confusion_loss = 0\n for i in range(1,len(list(right_model.parameters()))):\n para = list(right_model.parameters())[i].detach().cpu()\n #print(\"Expert %d\" %i)\n local_confusion_matrix = torch.nn.functional.softmax(para, dim=1)\n #print(local_confusion_matrix)\n residual_matrix = local_confusion_matrix - confusion_matrix[i-1, :, :]\n residual = torch.sum(abs(residual_matrix))\n confusion_loss += residual\n\n print(\"Total variation:\", confusion_loss.item())\n\ndef initial_priori(train_loader):\n p = torch.zeros((Config.num_classes))\n\n\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n linear_sum = torch.sum(right_data, dim=1)\n _, majority = torch.max(linear_sum, 1)\n majority = Variable(majority).long()\n total += label.size()[0]\n for i in range(Config.num_classes):\n p[i] += torch.sum(majority == i).float()\n p = p/float(total)\n return p\n\ndef update_priori(model, train_loader):\n # waiting for solution\n p = torch.zeros((Config.num_classes))\n\n # updating priori by posteri\n\n total = 0\n for batch_idx, (left_data, right_data, label) in enumerate(train_loader):\n ep = Variable(right_data).float().cuda()\n images = Variable(left_data).float().cuda()\n outputs = model(images)\n _, predicts = torch.max(outputs.data, 1)\n total += ep.size()[0]\n predicts = predicts.detach().cpu()\n for i in range(Config.num_classes):\n p[i] += torch.sum(predicts == i).float()\n\n p = p/float(total)\n '''\n # updating priori by loss\n pri = priori\n pri = Variable(pri, requires_grad=True)\n loss = mig_loss_function(left_outputs.detach(),right_outputs.detach(),p)\n loss.backward()\n grad = pri.grad\n pri = pri.detach() - Config.alpha * grad\n pri = torch.exp(pri)\n pri = pri / torch.sum(pri)\n \n '''\n\n '''\n # true priori\n p[0] = 0.5\n p[1] = 0.5\n '''\n return p",
"step-ids": [
10,
11,
12,
14,
15
]
}
|
[
10,
11,
12,
14,
15
] |
#
#River Sheppard
#
#
from PIL import Image
if __name__ == "__main__":
scale = 768
# creating the new image in RGB mode
bitmap = Image.new("RGB", (scale, scale), "white")
# Allocating the storage for the image and
# loading the pixel data.
pix = bitmap.load()
# setting up the variables according to
# the equation to create the fractal
c = complex(-0.585, 0.85)
move = 0.0
maxIter = 255
for x in range(scale):
for y in range(scale):
zx = 1.5*(x - scale/2)/(0.5*scale) + move
zy = 1.0*(y - scale/2)/(0.5*scale) + move
z = complex(zx,zy)
i = maxIter
while abs(z*z) < 4 and i > 1:
z = z**2 + c
i -= 1
# convert byte to RGB (3 bytes), kinda
# magic to get nice colors
pix[x,y] = (i << 21) + (i << 10) + i*8
# to display the created fractal
bitmap.show()
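# Editor's note (sketch, not in the original): because |z*z| equals |z|**2, the
# escape test above is the same condition as the more common form
#     while abs(z) < 2 and i > 1:
# i.e. iteration stops once |z| reaches 2.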
|
normal
|
{
"blob_id": "507251113d80eaa3684081f7814470053b04dda9",
"index": 1436,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n scale = 768\n bitmap = Image.new('RGB', (scale, scale), 'white')\n pix = bitmap.load()\n c = complex(-0.585, 0.85)\n move = 0.0\n maxIter = 255\n for x in range(scale):\n for y in range(scale):\n zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move\n zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move\n z = complex(zx, zy)\n i = maxIter\n while abs(z * z) < 4 and i > 1:\n z = z ** 2 + c\n i -= 1\n pix[x, y] = (i << 21) + (i << 10) + i * 8\n bitmap.show()\n",
"step-3": "from PIL import Image\nif __name__ == '__main__':\n scale = 768\n bitmap = Image.new('RGB', (scale, scale), 'white')\n pix = bitmap.load()\n c = complex(-0.585, 0.85)\n move = 0.0\n maxIter = 255\n for x in range(scale):\n for y in range(scale):\n zx = 1.5 * (x - scale / 2) / (0.5 * scale) + move\n zy = 1.0 * (y - scale / 2) / (0.5 * scale) + move\n z = complex(zx, zy)\n i = maxIter\n while abs(z * z) < 4 and i > 1:\n z = z ** 2 + c\n i -= 1\n pix[x, y] = (i << 21) + (i << 10) + i * 8\n bitmap.show()\n",
"step-4": "#\r\n#River Sheppard\r\n#\r\n#\r\n\r\nfrom PIL import Image\r\n\r\nif __name__ == \"__main__\":\r\n scale = 768\r\n \r\n # creating the new image in RGB mode\r\n bitmap = Image.new(\"RGB\", (scale, scale), \"white\")\r\n \r\n # Allocating the storage for the image and\r\n # loading the pixel data.\r\n pix = bitmap.load()\r\n \r\n # setting up the variables according to \r\n # the equation to create the fractal\r\n c = complex(-0.585, 0.85)\r\n move = 0.0\r\n maxIter = 255\r\n \r\n for x in range(scale):\r\n for y in range(scale):\r\n zx = 1.5*(x - scale/2)/(0.5*scale) + move\r\n zy = 1.0*(y - scale/2)/(0.5*scale) + move\r\n z = complex(zx,zy)\r\n i = maxIter\r\n while abs(z*z) < 4 and i > 1:\r\n z = z**2 + c\r\n i -= 1\r\n \r\n # convert byte to RGB (3 bytes), kinda \r\n # magic to get nice colors\r\n pix[x,y] = (i << 21) + (i << 10) + i*8\r\n \r\n # to display the created fractal\r\n bitmap.show()\r\n \r\n \r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
"""
Class: DatasetLoader
This class is responsible for loading datasets.
After initialization the class provides two attributes:
train: contains the train set
test: contains the test set
It can also return the data structure in the form of three lists:
- users
- items
- values (which are ratings)
"""
import pandas as pd
from Ratings import Ratings
class DatasetLoader(object):
# Default path where dataset files are located
base_path = './dataset/'
def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):
if ds_columns is None:
columns = ['user_id', 'item_id', 'values', 'timestamp']
else:
columns = ds_columns
self.id = ds_id
self.name = ds_name
self.desc = ds_desc
train_path = self.base_path + self.name + str(self.id) + '.base'
test_path = self.base_path + self.name + str(self.id) + '.test'
self.train = pd.read_csv(train_path, header=None, delim_whitespace=True)
self.train.columns = columns
self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)
self.test.columns = columns
self.train_ratings = Ratings(self.to_lists(self.train))
self.test_ratings = Ratings(self.to_lists(self.test))
def to_lists(self, ds):
"""
        :param ds: pandas.DataFrame holding either the train or the test set
        :return: dataset in the form of three lists saved in a dict {users: u, items: i, values: v}
"""
#ds = getattr(self, ds_type)
lists = {
'users': ds['user_id'].values,
'items': ds['item_id'].values,
'values': ds['values'].values
}
return lists
def __str__(self):
return f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. \
train size: {len(self.train)}, test size: {len(self.test)}'
# Testing Area
# m_lens = DatasetLoader(2, 'u', 'MovieLens dataset, fold 1')
# print(len(m_lens.train))
# print(len(m_lens.test))
# print(m_lens)
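# Editor's sketch (not part of the original file): a runnable form of the
# commented test above, assuming ./dataset/u1.base and ./dataset/u1.test exist
# (e.g. the MovieLens 100K 'u1' split) and that the Ratings class is importable.
if __name__ == '__main__':
    m_lens = DatasetLoader(1, 'u', 'MovieLens dataset, fold 1')
    print(m_lens)
    print(len(m_lens.to_lists(m_lens.train)['users']), 'training ratings loaded')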
|
normal
|
{
"blob_id": "b668945820abe893b92fdf26ccd8563ccff804ee",
"index": 1981,
"step-1": "<mask token>\n\n\nclass DatasetLoader(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DatasetLoader(object):\n <mask token>\n\n def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):\n if ds_columns is None:\n columns = ['user_id', 'item_id', 'values', 'timestamp']\n else:\n columns = ds_columns\n self.id = ds_id\n self.name = ds_name\n self.desc = ds_desc\n train_path = self.base_path + self.name + str(self.id) + '.base'\n test_path = self.base_path + self.name + str(self.id) + '.test'\n self.train = pd.read_csv(train_path, header=None, delim_whitespace=True\n )\n self.train.columns = columns\n self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)\n self.test.columns = columns\n self.train_ratings = Ratings(self.to_lists(self.train))\n self.test_ratings = Ratings(self.to_lists(self.test))\n\n def to_lists(self, ds):\n \"\"\"\n :param ds_type: str [train || test]\n :return: dataset in form of three list saved in a dict {users:u, items:i, values:v}\n \"\"\"\n lists = {'users': ds['user_id'].values, 'items': ds['item_id'].\n values, 'values': ds['values'].values}\n return lists\n\n def __str__(self):\n return (\n f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'\n )\n",
"step-3": "<mask token>\n\n\nclass DatasetLoader(object):\n base_path = './dataset/'\n\n def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):\n if ds_columns is None:\n columns = ['user_id', 'item_id', 'values', 'timestamp']\n else:\n columns = ds_columns\n self.id = ds_id\n self.name = ds_name\n self.desc = ds_desc\n train_path = self.base_path + self.name + str(self.id) + '.base'\n test_path = self.base_path + self.name + str(self.id) + '.test'\n self.train = pd.read_csv(train_path, header=None, delim_whitespace=True\n )\n self.train.columns = columns\n self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)\n self.test.columns = columns\n self.train_ratings = Ratings(self.to_lists(self.train))\n self.test_ratings = Ratings(self.to_lists(self.test))\n\n def to_lists(self, ds):\n \"\"\"\n :param ds_type: str [train || test]\n :return: dataset in form of three list saved in a dict {users:u, items:i, values:v}\n \"\"\"\n lists = {'users': ds['user_id'].values, 'items': ds['item_id'].\n values, 'values': ds['values'].values}\n return lists\n\n def __str__(self):\n return (\n f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'\n )\n",
"step-4": "<mask token>\nimport pandas as pd\nfrom Ratings import Ratings\n\n\nclass DatasetLoader(object):\n base_path = './dataset/'\n\n def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):\n if ds_columns is None:\n columns = ['user_id', 'item_id', 'values', 'timestamp']\n else:\n columns = ds_columns\n self.id = ds_id\n self.name = ds_name\n self.desc = ds_desc\n train_path = self.base_path + self.name + str(self.id) + '.base'\n test_path = self.base_path + self.name + str(self.id) + '.test'\n self.train = pd.read_csv(train_path, header=None, delim_whitespace=True\n )\n self.train.columns = columns\n self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)\n self.test.columns = columns\n self.train_ratings = Ratings(self.to_lists(self.train))\n self.test_ratings = Ratings(self.to_lists(self.test))\n\n def to_lists(self, ds):\n \"\"\"\n :param ds_type: str [train || test]\n :return: dataset in form of three list saved in a dict {users:u, items:i, values:v}\n \"\"\"\n lists = {'users': ds['user_id'].values, 'items': ds['item_id'].\n values, 'values': ds['values'].values}\n return lists\n\n def __str__(self):\n return (\n f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. train size: {len(self.train)}, test size: {len(self.test)}'\n )\n",
"step-5": "\"\"\"\nClass: Dataset\n\nThis class is responsible of loading datasets\n\nAfter initializing using load method the class results two parameter:\n train: contains train set\n test: contains test set\n\nIt's able of returning data structure in form of three lists:\n - users\n - items\n - values (which are ratings)\n\"\"\"\n\nimport pandas as pd\nfrom Ratings import Ratings\n\n\nclass DatasetLoader(object):\n\n # Default path where dataset files are located\n base_path = './dataset/'\n\n def __init__(self, ds_id, ds_name, ds_desc, ds_columns=None):\n\n if ds_columns is None:\n columns = ['user_id', 'item_id', 'values', 'timestamp']\n else:\n columns = ds_columns\n\n self.id = ds_id\n self.name = ds_name\n self.desc = ds_desc\n\n train_path = self.base_path + self.name + str(self.id) + '.base'\n test_path = self.base_path + self.name + str(self.id) + '.test'\n\n self.train = pd.read_csv(train_path, header=None, delim_whitespace=True)\n self.train.columns = columns\n\n self.test = pd.read_csv(test_path, header=None, delim_whitespace=True)\n self.test.columns = columns\n\n self.train_ratings = Ratings(self.to_lists(self.train))\n self.test_ratings = Ratings(self.to_lists(self.test))\n\n def to_lists(self, ds):\n \"\"\"\n :param ds_type: str [train || test]\n :return: dataset in form of three list saved in a dict {users:u, items:i, values:v}\n \"\"\"\n #ds = getattr(self, ds_type)\n\n lists = {\n 'users': ds['user_id'].values,\n 'items': ds['item_id'].values,\n 'values': ds['values'].values\n }\n\n return lists\n\n def __str__(self):\n return f'Dataset Id: {self.id}, File Name: {self.name}, Description: {self.desc}. \\\n train size: {len(self.train)}, test size: {len(self.test)}'\n\n\n# Testing Area\n# m_lens = Loader(2, 'u', 'MovieLens dataset, fold 1')\n# print(len(m_lens.train))\n# print(len(m_lens.test))\n# print(m_lens)\n",
"step-ids": [
1,
4,
5,
6,
7
]
}
|
[
1,
4,
5,
6,
7
] |
#!/usr/local/bin/python
import requests as rq
import sqlite3 as sq
from dateutil import parser
import datetime
import pytz
import json
from os.path import expanduser
import shutil
from os.path import isfile
import time
#FRED Config
urls = {'FRED':"http://api.stlouisfed.org/fred"}
urls['FRED_SER'] = urls['FRED'] + "/series"
urls['FRED_OBS'] = urls['FRED_SER'] + "/observations"
api_key = "fc359838e2193d76d75f8a850c41fbd7"
args = {"api_key":api_key, "series_id":0, "file_type":"json", "frequency":"sa", "aggregation_method" : "avg"} #initial arguments for FRED requests
home = expanduser("~")
#change this DB location
#db = "/Volumes/Pylos/Projects/FED/projection.db"
#
bu = home+"/exhibit/unemployment"+str(time.time())+".db"
db = home+"/exhibit/unemployment.db"
if isfile(db):
print "making backup at "+bu
shutil.copyfile(db,bu)
#DB config
#db = 'unemployment.db'
conn = sq.connect(db) #connection is open
conn.row_factory = sq.Row
force = True;
#setup vars
today = datetime.datetime.now()
today = pytz.utc.localize(today);
stamp = today.strftime("%Y-%m-%d %H:%M:%S%z")
#get string date for one decade ago
tmpStamp = today.strftime("%Y-%m-%d")
lDate = tmpStamp.split("-")
lDate[0] = str(int(lDate[0]) - 10);
startDate = datetime.date(int(lDate[0]),int(lDate[1]),int(lDate[2]))
startStr = lDate[0]+"-"+lDate[1]+"-"+lDate[2]
args["observation_start"] = startStr
def get_ids():
c = conn.cursor()
c.execute("SELECT series_id FROM ser_id");
rows = c.fetchall()
return rows
#check that all series are present, and up to date.
def check_series():
if force == True:
delete_rows()
print "Forced, deleting rows"
ids = get_ids() #get all ids from db
#print ids
c = conn.cursor()
for id in ids:
i = (id["series_id"],)
if i[0] != "N/A":
c.execute("SELECT * FROM ser_data WHERE ser_id=?",i)
data = c.fetchone();
if data is None or force == True: #this id is not in db
print('There is no series named %s in database, syncing with FRED...'%i)
create_series(i)
else: #id is found
date_check = check_date(data["date"]) #check if up to date
if date_check:
update_series(i)
def get_series(id):
args["series_id"] = id;
r = rq.get(urls["FRED_SER"], params=args)
j = r.json();
_date = j["seriess"][0]["last_updated"]
return {"series":j, 'date':_date}
def get_obs(id):
args["series_id"] = id;
r = rq.get(urls["FRED_OBS"], params=args)
j = r.json();
_obs = j["observations"]
nullItems = []
for (oi, ob) in enumerate(_obs):
if ob["value"] == ".":
nullItems.append(oi)
print("Null Items found at "+str(oi))
_obs[oi] = "null"
for (ni, nn) in enumerate(nullItems):
_obs.remove("null")
# print _obs
return _obs
def create_series(id):
c = conn.cursor()
obs = get_obs(id)
ser = get_series(id)
date = ser["date"]
ser = ser["series"]
q = (id,ser,obs,date);
c.execute("INSERT INTO ser_data VALUES(?,?,?,?,?)", (stamp,str(id[0]),json.dumps(ser),json.dumps(obs),date))
conn.commit()
def delete_rows():
c = conn.cursor()
c.execute("DELETE FROM ser_data")
conn.commit()
def check_date(d):
data_date = parser.parse(d);
data_utc = data_date.astimezone(pytz.utc);
check = today < data_utc
return check
def update_series(id):
c = conn.cursor()
obs = get_obs(id)
ser = get_series(id)
date = ser["date"]
ser = ser["series"]
q = (id,ser,obs,date);
c.execute("UPDATE ser_data SET series = ?, observations = ?, date = ?, updated = ? WHERE ser_id = ? ", (json.dumps(ser),json.dumps(obs),date,stamp, str(id[0])))
conn.commit();
print("seriess updated")
check_series()
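# Editor's sketch (assumption, not part of the original script): the code above
# expects the 'ser_id' and 'ser_data' tables to exist already. A minimal schema
# consistent with the SELECT/INSERT/UPDATE statements could be created with the
# commented snippet below; column names and order are inferred from those
# statements, so treat them as a guess rather than the author's original DDL.
#
#   setup = sq.connect(db)
#   setup.execute("CREATE TABLE IF NOT EXISTS ser_id (series_id TEXT)")
#   setup.execute("CREATE TABLE IF NOT EXISTS ser_data "
#                 "(updated TEXT, ser_id TEXT, series TEXT, observations TEXT, date TEXT)")
#   setup.commit()
#   setup.close()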
|
normal
|
{
"blob_id": "8dfb1312d82bb10f2376eb726f75a4a596319acb",
"index": 3143,
"step-1": "#!/usr/local/bin/python\nimport requests as rq\nimport sqlite3 as sq\nfrom dateutil import parser\nimport datetime\nimport pytz\nimport json\nfrom os.path import expanduser\nimport shutil\nfrom os.path import isfile\nimport time\n#FRED Config\nurls = {'FRED':\"http://api.stlouisfed.org/fred\"}\nurls['FRED_SER'] = urls['FRED'] + \"/series\"\nurls['FRED_OBS'] = urls['FRED_SER'] + \"/observations\"\napi_key = \"fc359838e2193d76d75f8a850c41fbd7\"\nargs = {\"api_key\":api_key, \"series_id\":0, \"file_type\":\"json\", \"frequency\":\"sa\", \"aggregation_method\" : \"avg\"} #initial arguments for FRED requests\n\n\nhome = expanduser(\"~\")\n#change this DB location\n#db = \"/Volumes/Pylos/Projects/FED/projection.db\"\n#\nbu = home+\"/exhibit/unemployment\"+str(time.time())+\".db\"\ndb = home+\"/exhibit/unemployment.db\"\n\nif isfile(db):\n\tprint \"making backup at \"+bu\n\tshutil.copyfile(db,bu)\n\n\n#DB config\n#db = 'unemployment.db'\nconn = sq.connect(db) #connection is open\nconn.row_factory = sq.Row\nforce = True;\n#setup vars\ntoday = datetime.datetime.now()\ntoday = pytz.utc.localize(today);\nstamp = today.strftime(\"%Y-%m-%d %H:%M:%S%z\")\n\n#get string date for one decade ago\ntmpStamp = today.strftime(\"%Y-%m-%d\")\nlDate = tmpStamp.split(\"-\")\nlDate[0] = str(int(lDate[0]) - 10);\nstartDate = datetime.date(int(lDate[0]),int(lDate[1]),int(lDate[2]))\nstartStr = lDate[0]+\"-\"+lDate[1]+\"-\"+lDate[2]\nargs[\"observation_start\"] = startStr\n\ndef get_ids():\n\tc = conn.cursor()\n\tc.execute(\"SELECT series_id FROM ser_id\");\n\trows = c.fetchall()\n\treturn rows\n\n#check that all series are present, and up to date.\ndef check_series():\n\tif force == True:\n\t\tdelete_rows()\n\t\tprint \"Forced, deleting rows\"\n\tids = get_ids() #get all ids from db\n\t#print ids\n\tc = conn.cursor()\n\tfor id in ids:\n\t\ti = (id[\"series_id\"],)\n\t\tif i[0] != \"N/A\":\n\t\t\tc.execute(\"SELECT * FROM ser_data WHERE ser_id=?\",i)\n\t\t\tdata = c.fetchone();\n\t\t\tif data is None or force == True: #this id is not in db\n\t\t\t\tprint('There is no series named %s in database, syncing with FRED...'%i)\n\t\t\t\tcreate_series(i)\n\t\t\telse: #id is found\n\t\t\t\tdate_check = check_date(data[\"date\"]) #check if up to date\n\t\t\t\tif date_check: \n\t\t\t\t\tupdate_series(i)\n\n\n\ndef get_series(id):\n\targs[\"series_id\"] = id;\n\tr = rq.get(urls[\"FRED_SER\"], params=args)\n\tj = r.json();\n\t_date = j[\"seriess\"][0][\"last_updated\"]\n\treturn {\"series\":j, 'date':_date}\n\ndef get_obs(id):\n\targs[\"series_id\"] = id;\n\tr = rq.get(urls[\"FRED_OBS\"], params=args)\n\tj = r.json();\n\t_obs = j[\"observations\"]\n\tnullItems = []\n\tfor (oi, ob) in enumerate(_obs):\n\t\tif ob[\"value\"] == \".\":\n\t\t\tnullItems.append(oi)\n\t\t\tprint(\"Null Items found at \"+str(oi))\n\t\t\t_obs[oi] = \"null\"\n\tfor (ni, nn) in enumerate(nullItems):\n\t\t_obs.remove(\"null\")\n#\tprint _obs\n\treturn _obs\n\t\ndef create_series(id):\n\tc = conn.cursor()\n\tobs = get_obs(id)\n\tser = get_series(id)\n\tdate = ser[\"date\"]\n\tser = ser[\"series\"]\n\tq = (id,ser,obs,date);\n\tc.execute(\"INSERT INTO ser_data VALUES(?,?,?,?,?)\", (stamp,str(id[0]),json.dumps(ser),json.dumps(obs),date))\n\tconn.commit()\n\ndef delete_rows():\n\tc = conn.cursor()\n\tc.execute(\"DELETE FROM ser_data\")\n\tconn.commit()\n\t\ndef check_date(d):\n\tdata_date = parser.parse(d);\n\tdata_utc = data_date.astimezone(pytz.utc);\n\tcheck = today < data_utc\n\treturn check\n\ndef update_series(id):\n\tc = conn.cursor()\n\tobs = 
get_obs(id)\n\tser = get_series(id)\n\tdate = ser[\"date\"]\n\tser = ser[\"series\"]\n\tq = (id,ser,obs,date);\n\tc.execute(\"UPDATE ser_data SET series = ?, observations = ?, date = ?, updated = ? WHERE ser_id = ? \", (json.dumps(ser),json.dumps(obs),date,stamp, str(id[0])))\n\tconn.commit();\n\tprint(\"seriess updated\")\n\t\ncheck_series()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from distutils.core import setup, Extension
setup(name='supermodule', version='1.0', \
ext_modules=[Extension('supermodule', ['main.c'])])
|
normal
|
{
"blob_id": "78c8f953b924f3e664570b844bf736a788e9cfb7",
"index": 3607,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsetup(name='supermodule', version='1.0', ext_modules=[Extension(\n 'supermodule', ['main.c'])])\n",
"step-3": "from distutils.core import setup, Extension\nsetup(name='supermodule', version='1.0', ext_modules=[Extension(\n 'supermodule', ['main.c'])])\n",
"step-4": "from distutils.core import setup, Extension\nsetup(name='supermodule', version='1.0', \\\n ext_modules=[Extension('supermodule', ['main.c'])])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import string
import random
file_one_time_pad = open("encryption_file.txt","r")
p_text = file_one_time_pad.read()
file_one_time_pad.close()
print(p_text)
p_text = p_text.lower()
main_text = []
p_text_numerical = []
temp_key = [21,25,20,15,16,14,10,26,24,9,8,13]
alphabets = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
main_key = []
cipher_text = []
cipher_text_numerical = []
length_p_text = len(p_text)
length_temp_key = len(temp_key)
random_alpha = 0
decipher_text = []
decipher_numerical = []
##Getting the numerical values of the text
for i in p_text:
main_text.append(i)
for i in range(length_p_text):
	for j in range(len(alphabets)):  # all 26 letters; range(25) would never match 'z'
if main_text[i] == alphabets[j]:
p_text_numerical.append(j)
break
##Generating keys dynamically
if length_p_text == length_temp_key:
for i in range(length_temp_key-1):
main_key.append(temp_key[i])
elif length_p_text < length_temp_key:
for i in range(length_p_text-1):
main_key.append(temp_key[i])
else:
for i in range(length_temp_key-1):
main_key.append(temp_key[i])
diff = length_p_text - length_temp_key
for i in range(diff):
random_alpha = random.choice(temp_key)
main_key.append(random_alpha)
print("The main key is :: \n")
print(main_key)
print("The length of p_text_numerical:: \t",len(p_text_numerical))
print("\n")
print("The length of the main_key is :: \t",len(main_key))
## Ciphering algorithm
for i in range(length_p_text-1):
cipher_text_numerical.append(abs(p_text_numerical[i]+main_key[i]))
print("The ciphered text is :: \n")
print(cipher_text_numerical)
## Deciphering algorithm
length_cipher = len(cipher_text_numerical)
for i in range(length_cipher):
decipher_numerical.append(cipher_text_numerical[i] - main_key[i])
print("The deciphered numerical::\n")
print(decipher_numerical)
temp = 0
for i in range(length_p_text-1):
temp = decipher_numerical[i]
decipher_text.append(alphabets[temp])
deciphered_one = ""
for i in decipher_text:
deciphered_one = deciphered_one + i
file_encrypt = open("encryption_file.txt","w")
file_encrypt.write(deciphered_one)
file_encrypt.close()
print("The deciphered text is ::\n")
print(decipher_text)
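# Editor's sketch (not in the original): the textbook one-time pad keeps every
# value inside the alphabet by wrapping both directions with mod 26; shown here
# only for comparison with the unwrapped addition used above.
otp_cipher = [(p + k) % 26 for p, k in zip(p_text_numerical, main_key)]
otp_plain = [(c - k) % 26 for c, k in zip(otp_cipher, main_key)]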
|
normal
|
{
"blob_id": "4b647d37d390a4df42f29bbfc7e4bae4e77c5828",
"index": 8935,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfile_one_time_pad.close()\nprint(p_text)\n<mask token>\nfor i in p_text:\n main_text.append(i)\nfor i in range(length_p_text):\n for j in range(25):\n if main_text[i] == alphabets[j]:\n p_text_numerical.append(j)\n break\nif length_p_text == length_temp_key:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\nelif length_p_text < length_temp_key:\n for i in range(length_p_text - 1):\n main_key.append(temp_key[i])\nelse:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\n diff = length_p_text - length_temp_key\n for i in range(diff):\n random_alpha = random.choice(temp_key)\n main_key.append(random_alpha)\nprint('The main key is :: \\n')\nprint(main_key)\nprint('The length of p_text_numerical:: \\t', len(p_text_numerical))\nprint('\\n')\nprint('The length of the main_key is :: \\t', len(main_key))\nfor i in range(length_p_text - 1):\n cipher_text_numerical.append(abs(p_text_numerical[i] + main_key[i]))\nprint('The cipherred text is :: \\n')\nprint(cipher_text_numerical)\n<mask token>\nfor i in range(length_cipher):\n decipher_numerical.append(cipher_text_numerical[i] - main_key[i])\nprint('The decipherred numerical::\\n')\nprint(decipher_numerical)\n<mask token>\nfor i in range(length_p_text - 1):\n temp = decipher_numerical[i]\n decipher_text.append(alphabets[temp])\n<mask token>\nfor i in decipher_text:\n deciphered_one = deciphered_one + i\n<mask token>\nfile_encrypt.write(deciphered_one)\nfile_encrypt.close()\nprint('The deciphered text is ::\\n')\nprint(decipher_text)\n",
"step-3": "<mask token>\nfile_one_time_pad = open('encryption_file.txt', 'r')\np_text = file_one_time_pad.read()\nfile_one_time_pad.close()\nprint(p_text)\np_text = str.lower(p_text)\nmain_text = []\np_text_numerical = []\ntemp_key = [21, 25, 20, 15, 16, 14, 10, 26, 24, 9, 8, 13]\nalphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',\n 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']\nmain_key = []\ncipher_text = []\ncipher_text_numerical = []\nlength_p_text = len(p_text)\nlength_temp_key = len(temp_key)\nrandom_alpha = 0\ndecipher_text = []\ndecipher_numerical = []\nfor i in p_text:\n main_text.append(i)\nfor i in range(length_p_text):\n for j in range(25):\n if main_text[i] == alphabets[j]:\n p_text_numerical.append(j)\n break\nif length_p_text == length_temp_key:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\nelif length_p_text < length_temp_key:\n for i in range(length_p_text - 1):\n main_key.append(temp_key[i])\nelse:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\n diff = length_p_text - length_temp_key\n for i in range(diff):\n random_alpha = random.choice(temp_key)\n main_key.append(random_alpha)\nprint('The main key is :: \\n')\nprint(main_key)\nprint('The length of p_text_numerical:: \\t', len(p_text_numerical))\nprint('\\n')\nprint('The length of the main_key is :: \\t', len(main_key))\nfor i in range(length_p_text - 1):\n cipher_text_numerical.append(abs(p_text_numerical[i] + main_key[i]))\nprint('The cipherred text is :: \\n')\nprint(cipher_text_numerical)\nlength_cipher = len(cipher_text_numerical)\nfor i in range(length_cipher):\n decipher_numerical.append(cipher_text_numerical[i] - main_key[i])\nprint('The decipherred numerical::\\n')\nprint(decipher_numerical)\ntemp = 0\nfor i in range(length_p_text - 1):\n temp = decipher_numerical[i]\n decipher_text.append(alphabets[temp])\ndeciphered_one = ''\nfor i in decipher_text:\n deciphered_one = deciphered_one + i\nfile_encrypt = open('encryption_file.txt', 'w')\nfile_encrypt.write(deciphered_one)\nfile_encrypt.close()\nprint('The deciphered text is ::\\n')\nprint(decipher_text)\n",
"step-4": "import string\nimport random\nfile_one_time_pad = open('encryption_file.txt', 'r')\np_text = file_one_time_pad.read()\nfile_one_time_pad.close()\nprint(p_text)\np_text = str.lower(p_text)\nmain_text = []\np_text_numerical = []\ntemp_key = [21, 25, 20, 15, 16, 14, 10, 26, 24, 9, 8, 13]\nalphabets = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',\n 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']\nmain_key = []\ncipher_text = []\ncipher_text_numerical = []\nlength_p_text = len(p_text)\nlength_temp_key = len(temp_key)\nrandom_alpha = 0\ndecipher_text = []\ndecipher_numerical = []\nfor i in p_text:\n main_text.append(i)\nfor i in range(length_p_text):\n for j in range(25):\n if main_text[i] == alphabets[j]:\n p_text_numerical.append(j)\n break\nif length_p_text == length_temp_key:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\nelif length_p_text < length_temp_key:\n for i in range(length_p_text - 1):\n main_key.append(temp_key[i])\nelse:\n for i in range(length_temp_key - 1):\n main_key.append(temp_key[i])\n diff = length_p_text - length_temp_key\n for i in range(diff):\n random_alpha = random.choice(temp_key)\n main_key.append(random_alpha)\nprint('The main key is :: \\n')\nprint(main_key)\nprint('The length of p_text_numerical:: \\t', len(p_text_numerical))\nprint('\\n')\nprint('The length of the main_key is :: \\t', len(main_key))\nfor i in range(length_p_text - 1):\n cipher_text_numerical.append(abs(p_text_numerical[i] + main_key[i]))\nprint('The cipherred text is :: \\n')\nprint(cipher_text_numerical)\nlength_cipher = len(cipher_text_numerical)\nfor i in range(length_cipher):\n decipher_numerical.append(cipher_text_numerical[i] - main_key[i])\nprint('The decipherred numerical::\\n')\nprint(decipher_numerical)\ntemp = 0\nfor i in range(length_p_text - 1):\n temp = decipher_numerical[i]\n decipher_text.append(alphabets[temp])\ndeciphered_one = ''\nfor i in decipher_text:\n deciphered_one = deciphered_one + i\nfile_encrypt = open('encryption_file.txt', 'w')\nfile_encrypt.write(deciphered_one)\nfile_encrypt.close()\nprint('The deciphered text is ::\\n')\nprint(decipher_text)\n",
"step-5": "import string\nimport random\n\nfile_one_time_pad = open(\"encryption_file.txt\",\"r\")\np_text = file_one_time_pad.read()\nfile_one_time_pad.close()\nprint(p_text)\np_text = str.lower(p_text)\nmain_text = []\np_text_numerical = []\ntemp_key = [21,25,20,15,16,14,10,26,24,9,8,13]\nalphabets = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']\nmain_key = []\ncipher_text = []\ncipher_text_numerical = []\nlength_p_text = len(p_text)\nlength_temp_key = len(temp_key)\nrandom_alpha = 0\ndecipher_text = []\ndecipher_numerical = []\n\n\n\n##Getting the numerical values of the text\nfor i in p_text:\n\tmain_text.append(i)\n\nfor i in range(length_p_text):\n\tfor j in range(25):\n\t\tif main_text[i] == alphabets[j]:\n\t\t\tp_text_numerical.append(j)\n\t\t\tbreak \n\n\n##Generating keys dynamically\nif length_p_text == length_temp_key:\n\tfor i in range(length_temp_key-1):\n\t\tmain_key.append(temp_key[i])\nelif length_p_text < length_temp_key:\n\tfor i in range(length_p_text-1):\n\t\tmain_key.append(temp_key[i])\nelse:\n\tfor i in range(length_temp_key-1):\n\t\tmain_key.append(temp_key[i])\n\tdiff = length_p_text - length_temp_key\n\tfor i in range(diff):\n\t\trandom_alpha = random.choice(temp_key)\n\t\tmain_key.append(random_alpha)\nprint(\"The main key is :: \\n\")\nprint(main_key)\nprint(\"The length of p_text_numerical:: \\t\",len(p_text_numerical))\nprint(\"\\n\")\nprint(\"The length of the main_key is :: \\t\",len(main_key))\n\n## Ciphering algorithm\n\nfor i in range(length_p_text-1):\n\tcipher_text_numerical.append(abs(p_text_numerical[i]+main_key[i]))\nprint(\"The cipherred text is :: \\n\")\nprint(cipher_text_numerical)\n\n\n## Deciphering algorithm\nlength_cipher = len(cipher_text_numerical)\nfor i in range(length_cipher):\n\tdecipher_numerical.append(cipher_text_numerical[i] - main_key[i])\nprint(\"The decipherred numerical::\\n\")\nprint(decipher_numerical)\n\ntemp = 0\nfor i in range(length_p_text-1):\n\ttemp = decipher_numerical[i]\t\n\tdecipher_text.append(alphabets[temp])\n\ndeciphered_one = \"\"\nfor i in decipher_text:\n\tdeciphered_one = deciphered_one + i\n\nfile_encrypt = open(\"encryption_file.txt\",\"w\")\nfile_encrypt.write(deciphered_one)\nfile_encrypt.close()\nprint(\"The deciphered text is ::\\n\")\nprint(decipher_text)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from flask import (Flask, g, render_template, flash, redirect, url_for)
from flask_login import (LoginManager, login_user, logout_user,
login_required, current_user)
import forms
import models
import sqlite3
DEBUG = True
app = Flask(__name__)
app.secret_key = 'auoesh.bouoastuh.43,uoausoehuoshuosth3ououea.auoub!'
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(userid):
try:
        return models.User.get(models.User.id == userid)
except models.DoesNotExist:
return None
def initialize():
models.DATABASE.connect()
models.DATABASE.create_tables([models.User], safe=True)
    models.DATABASE.close()
@app.before_request
def before_request():
""""Connect to the database before each request."""
g.db = models.DATABASE
g.db.connect()
g.user = current_user
@app.after_request
def after_request(response):
"""""Close the database connection after request. """
g.db.close()
return response
@app.route('/register', methods=('GET', 'POST'))
def register():
form = forms.RegistrationForm()
if form.validate_on_submit():
        flash("Yay, you registered", "success")
models.User.create_user(
username=form.username.data,
email=form.email.data,
password=form.password.data,
confrimpassword=form.password.data
)
return redirect(url_for('index'))
return render_template('register.html', form=form)
def check_password_hash(password, data):
pass
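# Editor's note (assumption): as written, this stub always returns None, so the
# login view below can never reach its success branch. If models.User stores
# bcrypt hashes (as a create_user helper in this kind of app typically would),
# the stub could simply be replaced by:
#     from flask_bcrypt import check_password_hash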
@app.route('/login', methods=('GET', 'POST'))
def login():
form = forms.LoginForm()
if form.validate_on_submit():
try:
            user = models.User.get(models.User.email == form.email.data)
        except models.DoesNotExist:
            flash("Your email or password doesn't match!", "error")
        else:
            if check_password_hash(user.password, form.password.data):
                login_user(user)
                flash("You've been logged in!", "success")
return redirect(url_for('index'))
else:
flash("Your email or password doesn't match!", "error")
return render_template('login.html', form=form)
@app.route('/logout')
@login_required
def logout():
logout_user()
    flash("You've been logged out! Come back soon!", "success")
return redirect(url_for('index'))
@app.route('/new_post', methods=('GET', 'POST'))
@login_required  # makes sure the user is logged in before being able to post
def post():
form = forms.PostForm()
if form.validate_on_submit():
models.Post.create(user=g.user._get_current_object(),
content=form.content.data.strip())
        flash("Message Posted! Thanks!", "success")
return redirect(url_for('index'))
return render_template('post.html', form=form)
@app.route('/')
def index():
return 'Hey!'
"""
models.initialize()
try:
models.User.create_user(
username='Steve',
email='[email protected]',
password='passsword',
admin=True
)
except ValueError:
pass
"""
if __name__ == '__main__':
app.run(debug=DEBUG)
|
normal
|
{
"blob_id": "849c468e4890c19806c678089ec8668576538b12",
"index": 2717,
"step-1": "<mask token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.user.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\ndef initialize():\n models.DATABASE.connect()\n models.DATABASE.create_tables([models.User], safe=True)\n models.DATABASE.closer()\n\n\[email protected]_request\ndef before_request():\n \"\"\"\"Connect to the database before each request.\"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<mask token>\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n flash('Yay, you registered', 'sucess')\n models.User.create_user(username=form.username.data, email=form.\n email.data, password=form.password.data, confrimpassword=form.\n password.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\ndef check_password_hash(password, data):\n pass\n\n\n<mask token>\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash('You.ve been logged out! Come back soon!', 'sucess')\n return redirect(url_for('index'))\n\n\[email protected]('/new_post', methods=('GET', 'POST'))\n@login_required\ndef post():\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(user=g.user._get_current_object(), content=form.\n content.data.strip())\n flash('Message Posted! Thanks!', 'sucess')\n return redirect(url_for('index'))\n return render_template('post.html', form=form)\n\n\[email protected]('/')\ndef index():\n return 'Hey!'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.user.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\ndef initialize():\n models.DATABASE.connect()\n models.DATABASE.create_tables([models.User], safe=True)\n models.DATABASE.closer()\n\n\[email protected]_request\ndef before_request():\n \"\"\"\"Connect to the database before each request.\"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\n<mask token>\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n flash('Yay, you registered', 'sucess')\n models.User.create_user(username=form.username.data, email=form.\n email.data, password=form.password.data, confrimpassword=form.\n password.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\ndef check_password_hash(password, data):\n pass\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.emails == form.email.data)\n except models.DoesNOtExit:\n flash(\"Your email or password doesn't match !\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in:\", 'Sucess')\n return redirect(url_for('index'))\n else:\n flash(\"Your email or password doesn't match!\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash('You.ve been logged out! Come back soon!', 'sucess')\n return redirect(url_for('index'))\n\n\[email protected]('/new_post', methods=('GET', 'POST'))\n@login_required\ndef post():\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(user=g.user._get_current_object(), content=form.\n content.data.strip())\n flash('Message Posted! Thanks!', 'sucess')\n return redirect(url_for('index'))\n return render_template('post.html', form=form)\n\n\[email protected]('/')\ndef index():\n return 'Hey!'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.user.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\ndef initialize():\n models.DATABASE.connect()\n models.DATABASE.create_tables([models.User], safe=True)\n models.DATABASE.closer()\n\n\[email protected]_request\ndef before_request():\n \"\"\"\"Connect to the database before each request.\"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"\"\"Close the database connection after request. \"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n flash('Yay, you registered', 'sucess')\n models.User.create_user(username=form.username.data, email=form.\n email.data, password=form.password.data, confrimpassword=form.\n password.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\ndef check_password_hash(password, data):\n pass\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.emails == form.email.data)\n except models.DoesNOtExit:\n flash(\"Your email or password doesn't match !\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in:\", 'Sucess')\n return redirect(url_for('index'))\n else:\n flash(\"Your email or password doesn't match!\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash('You.ve been logged out! Come back soon!', 'sucess')\n return redirect(url_for('index'))\n\n\[email protected]('/new_post', methods=('GET', 'POST'))\n@login_required\ndef post():\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(user=g.user._get_current_object(), content=form.\n content.data.strip())\n flash('Message Posted! Thanks!', 'sucess')\n return redirect(url_for('index'))\n return render_template('post.html', form=form)\n\n\[email protected]('/')\ndef index():\n return 'Hey!'\n\n\n<mask token>\n",
"step-4": "from flask import Flask, g, render_template, flash, redirect, url_for\nfrom flask_login import LoginManager, login_user, logout_user, login_required, current_user\nimport forms\nimport models\nimport sqlite3\nDEBUG = True\napp = Flask(__name__)\napp.secret_key = 'auoesh.bouoastuh.43,uoausoehuoshuosth3ououea.auoub!'\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'login'\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.user.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\ndef initialize():\n models.DATABASE.connect()\n models.DATABASE.create_tables([models.User], safe=True)\n models.DATABASE.closer()\n\n\[email protected]_request\ndef before_request():\n \"\"\"\"Connect to the database before each request.\"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n \"\"\"\"\"Close the database connection after request. \"\"\"\n g.db.close()\n return response\n\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n flash('Yay, you registered', 'sucess')\n models.User.create_user(username=form.username.data, email=form.\n email.data, password=form.password.data, confrimpassword=form.\n password.data)\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\ndef check_password_hash(password, data):\n pass\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.emails == form.email.data)\n except models.DoesNOtExit:\n flash(\"Your email or password doesn't match !\", 'error')\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in:\", 'Sucess')\n return redirect(url_for('index'))\n else:\n flash(\"Your email or password doesn't match!\", 'error')\n return render_template('login.html', form=form)\n\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash('You.ve been logged out! Come back soon!', 'sucess')\n return redirect(url_for('index'))\n\n\[email protected]('/new_post', methods=('GET', 'POST'))\n@login_required\ndef post():\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(user=g.user._get_current_object(), content=form.\n content.data.strip())\n flash('Message Posted! Thanks!', 'sucess')\n return redirect(url_for('index'))\n return render_template('post.html', form=form)\n\n\[email protected]('/')\ndef index():\n return 'Hey!'\n\n\n<mask token>\nif __name__ == '__main__':\n app.run(debug=DEBUG)\n",
"step-5": "from flask import (Flask, g, render_template, flash, redirect, url_for)\nfrom flask_login import (LoginManager, login_user, logout_user,\n login_required, current_user)\n\nimport forms\nimport models\nimport sqlite3\n\nDEBUG = True\n\napp = Flask(__name__)\napp.secret_key = 'auoesh.bouoastuh.43,uoausoehuoshuosth3ououea.auoub!'\n\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\nlogin_manager.login_view = 'login'\n\n\n@login_manager.user_loader\ndef load_user(userid):\n try:\n return models.user.get(models.User.id == userid)\n except models.DoesNotExist:\n return None\n\n\ndef initialize():\n models.DATABASE.connect()\n models.DATABASE.create_tables([models.User], safe=True)\n models.DATABASE.closer()\n\n\[email protected]_request\ndef before_request():\n \"\"\"\"Connect to the database before each request.\"\"\"\n g.db = models.DATABASE\n g.db.connect()\n g.user = current_user\n\[email protected]_request\ndef after_request(response):\n \"\"\"\"\"Close the database connection after request. \"\"\"\n g.db.close()\n return response\n\[email protected]('/register', methods=('GET', 'POST'))\ndef register():\n form = forms.RegistrationForm()\n if form.validate_on_submit():\n flash(\"Yay, you registered\", \"sucess\")\n models.User.create_user(\n username=form.username.data,\n email=form.email.data,\n password=form.password.data,\n confrimpassword=form.password.data\n )\n return redirect(url_for('index'))\n return render_template('register.html', form=form)\n\n\ndef check_password_hash(password, data):\n pass\n\n\[email protected]('/login', methods=('GET', 'POST'))\ndef login():\n form = forms.LoginForm()\n if form.validate_on_submit():\n try:\n user = models.User.get(models.User.emails == form.email.data)\n except models.DoesNOtExit:\n flash(\"Your email or password doesn't match !\", \"error\")\n else:\n if check_password_hash(user.password, form.password.data):\n login_user(user)\n flash(\"You've been logged in:\", \"Sucess\")\n return redirect(url_for('index'))\n else:\n flash(\"Your email or password doesn't match!\", \"error\")\n return render_template('login.html', form=form)\n\[email protected]('/logout')\n@login_required\ndef logout():\n logout_user()\n flash(\"You.ve been logged out! Come back soon!\", \"sucess\")\n return redirect(url_for('index'))\n\[email protected]('/new_post', methods=('GET', 'POST'))\n@login_required #makes sures the user is logged in before been able to post\ndef post():\n form = forms.PostForm()\n if form.validate_on_submit():\n models.Post.create(user=g.user._get_current_object(),\n content=form.content.data.strip())\n flash(\"Message Posted! Thanks!\", \"sucess\")\n return redirect(url_for('index'))\n return render_template('post.html', form=form)\n\[email protected]('/')\ndef index():\n return 'Hey!'\n\n\"\"\"\nmodels.initialize()\ntry:\n models.User.create_user(\n username='Steve',\n email='[email protected]',\n password='passsword',\n admin=True\n )\n except ValueError:\n pass\n\"\"\" \nif __name__ == '__main__':\n app.run(debug=DEBUG)\n",
"step-ids": [
8,
9,
10,
13,
14
]
}
|
[
8,
9,
10,
13,
14
] |
from keras.models import Sequential
from keras.layers import Convolution2D # for 2d images
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense
import tensorflow as tf
from keras_preprocessing.image import ImageDataGenerator
cnn = Sequential()
rgb = 64
# step 1: convolution
# slide feature detectors ("filters") along image
# results feature maps that form convolutional layer
cnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu')) # 32, 3x3 filters
# step 2: pooling
cnn.add(MaxPool2D(pool_size=(2, 2)))
# step 3: flatten
# this vector will be the input of a future ann
cnn.add(Flatten())
# step 4: full connection
cnn.add(Dense(output_dim=128, activation='relu')) # add hidden layers
cnn.add(Dense(output_dim=1, activation='sigmoid')) # sigmoid for binary output
# compile cnn
cnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# image augmentation - prevent overfitting
train_datagen = ImageDataGenerator(
rescale=1./255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)
train_set = train_datagen.flow_from_directory(
'dataset/training_set',
target_size=(rgb, rgb),
batch_size=32,
class_mode='binary')
test_set = test_datagen.flow_from_directory(
'dataset/test_set',
target_size=(rgb, rgb),
batch_size=32,
class_mode='binary')
cnn.fit_generator(
train_set,
    steps_per_epoch=8000 // 32,  # 8000 training images / batch size 32 = 250 batches per epoch
    epochs=10,
    validation_data=test_set,
    validation_steps=2000 // 32)  # 2000 test images / batch size 32
print(cnn.summary())
cnn.save('CatDogModel.h5')
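# Editor's sketch (not part of the original script): reloading the saved model
# for a single prediction. The image path below is hypothetical, and class
# index 1 is assumed to mean 'dog' -- check train_set.class_indices to be sure.
import numpy as np
from keras.models import load_model
from keras.preprocessing.image import load_img, img_to_array

reloaded = load_model('CatDogModel.h5')
sample = load_img('dataset/single_prediction/sample.jpg', target_size=(rgb, rgb))
batch = np.expand_dims(img_to_array(sample) / 255.0, axis=0)
prob = reloaded.predict(batch)[0][0]
print('dog' if prob > 0.5 else 'cat')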
|
normal
|
{
"blob_id": "9fa5f4b4aeb7fe42d313a0ec4e57ce15acbfcf46",
"index": 3960,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n<mask token>\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-3": "<mask token>\ncnn = Sequential()\nrgb = 64\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntrain_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(rgb, rgb), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(rgb, rgb), batch_size=32, class_mode='binary')\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-4": "from keras.models import Sequential\nfrom keras.layers import Convolution2D\nfrom keras.layers import MaxPool2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\nimport tensorflow as tf\nfrom keras_preprocessing.image import ImageDataGenerator\ncnn = Sequential()\nrgb = 64\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu'))\ncnn.add(MaxPool2D(pool_size=(2, 2)))\ncnn.add(Flatten())\ncnn.add(Dense(output_dim=128, activation='relu'))\ncnn.add(Dense(output_dim=1, activation='sigmoid'))\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\ntrain_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,\n zoom_range=0.2, horizontal_flip=True)\ntest_datagen = ImageDataGenerator(rescale=1.0 / 255)\ntrain_set = train_datagen.flow_from_directory('dataset/training_set',\n target_size=(rgb, rgb), batch_size=32, class_mode='binary')\ntest_set = test_datagen.flow_from_directory('dataset/test_set', target_size\n =(rgb, rgb), batch_size=32, class_mode='binary')\ncnn.fit_generator(train_set, steps_per_epoch=8000, epochs=10,\n validation_data=test_set, validation_steps=2000)\nprint(cnn.summary())\ncnn.save('CatDogModel.h5')\n",
"step-5": "from keras.models import Sequential\nfrom keras.layers import Convolution2D # for 2d images\nfrom keras.layers import MaxPool2D\nfrom keras.layers import Flatten\nfrom keras.layers import Dense\nimport tensorflow as tf\nfrom keras_preprocessing.image import ImageDataGenerator\n\ncnn = Sequential()\n\nrgb = 64\n\n# step 1: convolution\n# slide feature detectors (\"filters\") along image\n# results feature maps that form convolutional layer\ncnn.add(Convolution2D(32, 3, 3, input_shape=(rgb, rgb, 3), activation='relu')) # 32, 3x3 filters\n\n# step 2: pooling\ncnn.add(MaxPool2D(pool_size=(2, 2)))\n\n# step 3: flatten\n# this vector will be the input of a future ann\ncnn.add(Flatten())\n\n# step 4: full connection\ncnn.add(Dense(output_dim=128, activation='relu')) # add hidden layers\ncnn.add(Dense(output_dim=1, activation='sigmoid')) # sigmoid for binary output\n\n# compile cnn\ncnn.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])\n\n# image augmentation - prevent overfitting\ntrain_datagen = ImageDataGenerator(\n rescale=1./255,\n shear_range=0.2,\n zoom_range=0.2,\n horizontal_flip=True)\n\ntest_datagen = ImageDataGenerator(rescale=1./255)\n\ntrain_set = train_datagen.flow_from_directory(\n 'dataset/training_set',\n target_size=(rgb, rgb),\n batch_size=32,\n class_mode='binary')\n\ntest_set = test_datagen.flow_from_directory(\n 'dataset/test_set',\n target_size=(rgb, rgb),\n batch_size=32,\n class_mode='binary')\n\ncnn.fit_generator(\n train_set,\n steps_per_epoch=8000, # we have 8k images in our training set\n epochs=10,\n validation_data=test_set,\n validation_steps=2000)\n\nprint(cnn.summary())\n\ncnn.save('CatDogModel.h5')\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.shortcuts import render, HttpResponse, redirect
from ..login.models import *
from ..dashboard.models import *
def display(request, id):
context = {'job': Job.objects.get(id=int(id))}
return render(request, 'handy_helper_exam/display.html', context)
|
normal
|
{
"blob_id": "f1fdba1c07a29aa22ee8d0dcbd6f902aa2e8b4c2",
"index": 9342,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef display(request, id):\n context = {'job': Job.objects.get(id=int(id))}\n return render(request, 'handy_helper_exam/display.html', context)\n",
"step-3": "from django.shortcuts import render, HttpResponse, redirect\nfrom ..login.models import *\nfrom ..dashboard.models import *\n\n\ndef display(request, id):\n context = {'job': Job.objects.get(id=int(id))}\n return render(request, 'handy_helper_exam/display.html', context)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/python
import os
import sys
fdatadir = "/fdata/hepx/store/user/taohuang/NANOAOD/"
datasets = []; NumSample = []; sampleN_short = []
Nanodatasets = []; localdirs = {}
MCxsections = []
#doTT=True; doDY=True; doVV=True; doSingleT=True; doWjets=True; dottV=True
##DoubleEG
datasets.append('/DoubleEG/Run2016B-05Feb2018_ver1-v1/NANOAOD')
NumSample.append('-1'); sampleN_short.append('DoubleEGRun2016Bver1')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016B-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-2'); sampleN_short.append('DoubleEGRun2016Bver2')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016C-05Feb2018-v1/NANOAOD')
NumSample.append('-3'); sampleN_short.append('DoubleEGRun2016C')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016D-05Feb2018-v1/NANOAOD')
NumSample.append('-4'); sampleN_short.append('DoubleEGRun2016D')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016E-05Feb2018-v1/NANOAOD')
NumSample.append('-5'); sampleN_short.append('DoubleEGRun2016E')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016F-05Feb2018-v1/NANOAOD')
NumSample.append('-6'); sampleN_short.append('DoubleEGRun2016F')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016G-05Feb2018-v1/NANOAOD')
NumSample.append('-7'); sampleN_short.append('DoubleEGRun2016G')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016H-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-8'); sampleN_short.append('DoubleEGRun2016Hver2')
MCxsections.append(-1.0)
datasets.append('/DoubleEG/Run2016H-05Feb2018_ver3-v1/NANOAOD')
NumSample.append('-9'); sampleN_short.append('DoubleEGRun2016Hver3')
MCxsections.append(-1.0)
##DoubleMuon
datasets.append('/DoubleMuon/Run2016B-05Feb2018_ver1-v1/NANOAOD')
NumSample.append('-10'); sampleN_short.append('DoubleMuonRun2016Bver1')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016B-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-11'); sampleN_short.append('DoubleMuonRun2016Bver2')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016C-05Feb2018-v1/NANOAOD')
NumSample.append('-12'); sampleN_short.append('DoubleMuonRun2016C')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016D-05Feb2018-v1/NANOAOD')
NumSample.append('-13'); sampleN_short.append('DoubleMuonRun2016D')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016E-05Feb2018-v1/NANOAOD')
NumSample.append('-14'); sampleN_short.append('DoubleMuonRun2016E')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016F-05Feb2018-v1/NANOAOD')
NumSample.append('-15'); sampleN_short.append('DoubleMuonRun2016F')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016G-05Feb2018-v1/NANOAOD')
NumSample.append('-16'); sampleN_short.append('DoubleMuonRun2016G')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016H-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-17'); sampleN_short.append('DoubleMuonRun2016Hver2')
MCxsections.append(-1.0)
datasets.append('/DoubleMuon/Run2016H-05Feb2018_ver3-v1/NANOAOD')
NumSample.append('-18'); sampleN_short.append('DoubleMuonRun2016Hver3')
MCxsections.append(-1.0)
#MuonEG
datasets.append('/MuonEG/Run2016B-05Feb2018_ver1-v1/NANOAOD')
NumSample.append('-19'); sampleN_short.append('MuonEGRun2016Bver2')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016B-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-20'); sampleN_short.append('MuonEGRun2016Bver2')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016C-05Feb2018-v1/NANOAOD')
NumSample.append('-21'); sampleN_short.append('MuonEGRun2016C')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016D-05Feb2018-v1/NANOAOD')
NumSample.append('-22'); sampleN_short.append('MuonEGRun2016D')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016E-05Feb2018-v1/NANOAOD')
NumSample.append('-23'); sampleN_short.append('MuonEGRun2016E')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016F-05Feb2018-v1/NANOAOD')
NumSample.append('-24'); sampleN_short.append('MuonEGRun2016F')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016G-05Feb2018-v1/NANOAOD')
NumSample.append('-25'); sampleN_short.append('MuonEGRun2016G')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016H-05Feb2018_ver2-v1/NANOAOD')
NumSample.append('-26'); sampleN_short.append('MuonEGRun2016Hver2')
MCxsections.append(-1.0)
datasets.append('/MuonEG/Run2016H-05Feb2018_ver3-v1/NANOAOD')
NumSample.append('-27'); sampleN_short.append('MuonEGRun2016Hver3')
MCxsections.append(-1.0)
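# signal mass points (in GeV) for the GluGluToRadionToHHTo2B2VTo2L2Nu samples below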
masspoints = [260, 270, 300, 350, 400, 450, 500, 550, 600, 650, 750, 800, 900]
for mass in masspoints:
datasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-%d_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM"%mass)
NumSample.append(masspoints.index(mass)); sampleN_short.append('RadionM%d'%mass)
MCxsections.append(5.0)#by default, assume the cross section for signal is 5pb
#datasets.append("/GluGluToBulkGravitonToHHTo2B2VTo2L2Nu_M-*_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
#NumSample.append('2'); sampleN_short.append('Graviton')
# TT## FIXME, use official one later
#datasets.append('/TTTo2L2Nu_13TeV-powheg/RunIISpring16MiniAODv2-PUSpring16_80X_mcRun2_asymptotic_2016_miniAODv2_v0_ext1-v1/MINIAODSIM')
datasets.append('/TTTo2L2Nu_TuneCUETP8M2_ttHtranche3_13TeV-powheg-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
#datasets.append('/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/arizzi-RunIIFall17MiniAOD-94X-Nano01Fall17-e273b12d9f89d622a34e4bc98b05ee29/USER')
NumSample.append('13'); sampleN_short.append('TT')
#MCxsections.append(72.1)
#MCxsections.append(76.7)
MCxsections.append(87.31)
# DY
#datasets.append('/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
datasets.append('/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('14'); sampleN_short.append('DY')
MCxsections.append(18610.0)
datasets.append('/DYToLL_0J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('15'); sampleN_short.append('DY')
MCxsections.append(4758.9)
datasets.append('/DYToLL_1J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('16'); sampleN_short.append('DY')
MCxsections.append(929.1)
datasets.append('/DYToLL_2J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('17'); sampleN_short.append('DY')
MCxsections.append(337.1)
# VV
datasets.append('/ZZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('18'); sampleN_short.append('VV')
MCxsections.append(3.22)
datasets.append('/ZZTo2L2Nu_13TeV_powheg_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('19'); sampleN_short.append('VV')
MCxsections.append(0.564)
datasets.append('/ZZTo4L_13TeV_powheg_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('20'); sampleN_short.append('VV')
MCxsections.append(1.256)
#datasets.append('/WWToLNuQQ_aTGC_13TeV-madgraph-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
#NumSample.append('21'); sampleN_short.append('VV')
#MCxsections.append(49.997)# ## not available now because of pdf uncertainty
#FIXME
#datasets.append('/WWTo2L2Nu_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
#datasets.append('/WWTo2L2Nu_13TeV-powheg/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
#NumSample.append('22'); sampleN_short.append('VV') ### not available now
#MCxsections.append(12.178)
datasets.append('/WZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('23'); sampleN_short.append('VV')
MCxsections.append(5.595)
#FIXME
#datasets.append('/WZTo1L3Nu_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
#NumSample.append('24'); sampleN_short.append('VV') ### not available now
#MCxsections.append(3.033)
datasets.append('/WZTo1L1Nu2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v3/MINIAODSIM')
NumSample.append('25'); sampleN_short.append('VV')
MCxsections.append(10.71)
datasets.append('/WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('26'); sampleN_short.append('VV')
MCxsections.append(4.42965)
##sT
datasets.append('/ST_t-channel_top_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('27'); sampleN_short.append('sT')
MCxsections.append(136.02)
datasets.append('/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('28'); sampleN_short.append('sT')
MCxsections.append(80.95)
datasets.append('/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('29'); sampleN_short.append('sT')
MCxsections.append(3.36)
datasets.append('/ST_tW_antitop_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('30'); sampleN_short.append('sT')
MCxsections.append(19.5545)
datasets.append('/ST_tW_top_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('31'); sampleN_short.append('sT')
MCxsections.append(19.5545)
# W + Jets
datasets.append('/WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('32'); sampleN_short.append('Wjet')
MCxsections.append(61526.7)
datasets.append('/WJetsToLNu_HT-100To200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')
NumSample.append('33'); sampleN_short.append('Wjet')
MCxsections.append(1627.45)
datasets.append('/WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')
NumSample.append('34'); sampleN_short.append('Wjet')
MCxsections.append(435.237)
datasets.append('/WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('35'); sampleN_short.append('Wjet')
MCxsections.append(59.181)
#FIXME
#datasets.append('/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
#NumSample.append('36'); sampleN_short.append('Wjet')### not available now
#MCxsections.append(14.58)### not available now
datasets.append('/WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('37'); sampleN_short.append('Wjet')
MCxsections.append(6.656)
datasets.append('/WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('38'); sampleN_short.append('Wjet')
MCxsections.append(1.608)
datasets.append('/WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')
NumSample.append('39'); sampleN_short.append('Wjet')
MCxsections.append(0.0389)
# tt + V
datasets.append('/TTWJetsToQQ_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('40'); sampleN_short.append('ttV')
MCxsections.append(0.4062)
datasets.append('/TTWJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')
NumSample.append('41'); sampleN_short.append('ttV')
MCxsections.append(0.2043)
datasets.append('/TTZToQQ_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')
NumSample.append('42'); sampleN_short.append('ttV')
MCxsections.append(0.5297)
datasets.append('/TTZToLLNuNu_M-10_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext3-v1/MINIAODSIM')
NumSample.append('43'); sampleN_short.append('ttV')
MCxsections.append(0.2529)
alljobtypes = set(sampleN_short)
for job in alljobtypes:
localdirs[job] = []
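# map each sample's short name to the local NanoAOD directories expected under fdatadir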
for ijob, job in enumerate(datasets):
nsample = int(NumSample[ijob])
jobtype = sampleN_short[ijob]
dataname = ""
datadir = " "
#print "nsample ",nsample, " jobtype ",jobtype
if nsample < 0:
datadir = sampleN_short[ijob]
dataname = job
#print "real data nsample ",nsample, " datadir ",datadir
    elif nsample >= 0:  # MC samples (the signal NumSample indices start at 0)
datadir = job.split('/')[1]
#print "MC nsample ",nsample, " datadir ",datadir, "MiniAOD dataset ",job.split('/')
#query = "dataset dataset=/%s/*/NANOAODSIM"%(datadir)
#pdata = os.popen("dasgoclient -limit=0 -query='{query}'".format(query = query))
#founddataset = False
#for line in pdata:
# #print "dataset ",line," datatype ",datadir
# if datadir in line:
# founddataset = True
# dataname = line[:-1]
#if not(founddataset):
# print "WARNING!!!!! no dataset found for ",datadir
localdirs[jobtype].append(os.path.join(fdatadir, datadir))
Nanodatasets.append("/DoubleEG/Run2016B-05Feb2018_ver1-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016B-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016C-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016D-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016E-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016F-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016G-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016H-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/DoubleEG/Run2016H-05Feb2018_ver3-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016B-05Feb2018_ver1-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016B-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016C-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016D-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016E-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016F-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016G-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016H-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/DoubleMuon/Run2016H-05Feb2018_ver3-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016B-05Feb2018_ver1-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016B-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016C-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016D-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016E-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016F-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016G-05Feb2018-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016H-05Feb2018_ver2-v1/NANOAOD")
Nanodatasets.append("/MuonEG/Run2016H-05Feb2018_ver3-v1/NANOAOD")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-260_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-270_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-300_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-350_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-400_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-450_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-500_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-550_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-600_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-650_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-750_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-800_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/GluGluToRadionToHHTo2B2VTo2L2Nu_M-900_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
#TTbar
#Nanodatasets.append("/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/arizzi-RunIIFall17MiniAOD-94X-Nano01Fall17-e273b12d9f89d622a34e4bc98b05ee29/USER")
Nanodatasets.append('/TTTo2L2Nu_TuneCUETP8M2_ttHtranche3_13TeV-powheg-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM')
# DY
Nanodatasets.append("/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/DYToLL_0J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
Nanodatasets.append("/DYToLL_1J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
Nanodatasets.append("/DYToLL_2J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
# VV
Nanodatasets.append("/ZZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ZZTo2L2Nu_13TeV_powheg_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ZZTo4L_13TeV_powheg_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
#Nanodatasets.append("/WWToLNuQQ_aTGC_13TeV-madgraph-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/WZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/WZTo1L1Nu2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
#sT
Nanodatasets.append("/ST_t-channel_top_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ST_tW_antitop_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/ST_tW_top_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
#W+jets
Nanodatasets.append("/WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-100To200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM")
# tt + V
Nanodatasets.append("/TTWJetsToQQ_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/TTWJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM")
Nanodatasets.append("/TTZToQQ_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM")
Nanodatasets.append("/TTZToLLNuNu_M-10_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext3-v1/NANOAODSIM")
outAnalist = {}
outAnadir = "/fdata/hepx/store/user/taohuang/HHNtuple_20180328_fixedleptonDZeff/"
for i, datasetname in enumerate(Nanodatasets):
    sampleName = sampleN_short[i]
    if int(NumSample[i]) < 0:
        sampleName = "Data"
    outAnafile = os.path.join(outAnadir, Nanodatasets[i].split('/')[1])
    if sampleName in outAnalist:
        outAnalist[sampleName].append(outAnafile)
    else:
        outAnalist[sampleName] = []
        outAnalist[sampleName].append(outAnafile)
dataintxt = open("2016MCSamplelist.txt","w+")
for dataset in datasets:
dataintxt.write(dataset+"\n")
dataintxt.close()
|
normal
|
{
"blob_id": "72b5e76f63e347d7275b0b711fa02b7f327785f6",
"index": 7369,
"step-1": "#!/usr/bin/python\nimport os\nimport sys\n\nfdatadir = \"/fdata/hepx/store/user/taohuang/NANOAOD/\"\ndatasets = []; NumSample = []; sampleN_short = []\nNanodatasets = []; localdirs = {}\nMCxsections = []\n#doTT=True; doDY=True; doVV=True; doSingleT=True; doWjets=True; dottV=True\n\n##DoubleEG\ndatasets.append('/DoubleEG/Run2016B-05Feb2018_ver1-v1/NANOAOD')\nNumSample.append('-1'); sampleN_short.append('DoubleEGRun2016Bver1')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016B-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-2'); sampleN_short.append('DoubleEGRun2016Bver2')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016C-05Feb2018-v1/NANOAOD')\nNumSample.append('-3'); sampleN_short.append('DoubleEGRun2016C')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016D-05Feb2018-v1/NANOAOD')\nNumSample.append('-4'); sampleN_short.append('DoubleEGRun2016D')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016E-05Feb2018-v1/NANOAOD')\nNumSample.append('-5'); sampleN_short.append('DoubleEGRun2016E')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016F-05Feb2018-v1/NANOAOD')\nNumSample.append('-6'); sampleN_short.append('DoubleEGRun2016F')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016G-05Feb2018-v1/NANOAOD')\nNumSample.append('-7'); sampleN_short.append('DoubleEGRun2016G')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016H-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-8'); sampleN_short.append('DoubleEGRun2016Hver2')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleEG/Run2016H-05Feb2018_ver3-v1/NANOAOD')\nNumSample.append('-9'); sampleN_short.append('DoubleEGRun2016Hver3')\nMCxsections.append(-1.0)\n##DoubleMuon\ndatasets.append('/DoubleMuon/Run2016B-05Feb2018_ver1-v1/NANOAOD')\nNumSample.append('-10'); sampleN_short.append('DoubleMuonRun2016Bver1')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016B-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-11'); sampleN_short.append('DoubleMuonRun2016Bver2')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016C-05Feb2018-v1/NANOAOD')\nNumSample.append('-12'); sampleN_short.append('DoubleMuonRun2016C')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016D-05Feb2018-v1/NANOAOD')\nNumSample.append('-13'); sampleN_short.append('DoubleMuonRun2016D')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016E-05Feb2018-v1/NANOAOD')\nNumSample.append('-14'); sampleN_short.append('DoubleMuonRun2016E')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016F-05Feb2018-v1/NANOAOD')\nNumSample.append('-15'); sampleN_short.append('DoubleMuonRun2016F')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016G-05Feb2018-v1/NANOAOD')\nNumSample.append('-16'); sampleN_short.append('DoubleMuonRun2016G')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016H-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-17'); sampleN_short.append('DoubleMuonRun2016Hver2')\nMCxsections.append(-1.0)\ndatasets.append('/DoubleMuon/Run2016H-05Feb2018_ver3-v1/NANOAOD')\nNumSample.append('-18'); sampleN_short.append('DoubleMuonRun2016Hver3')\nMCxsections.append(-1.0)\n#MuonEG\ndatasets.append('/MuonEG/Run2016B-05Feb2018_ver1-v1/NANOAOD')\nNumSample.append('-19'); sampleN_short.append('MuonEGRun2016Bver2')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016B-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-20'); 
sampleN_short.append('MuonEGRun2016Bver2')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016C-05Feb2018-v1/NANOAOD')\nNumSample.append('-21'); sampleN_short.append('MuonEGRun2016C')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016D-05Feb2018-v1/NANOAOD')\nNumSample.append('-22'); sampleN_short.append('MuonEGRun2016D')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016E-05Feb2018-v1/NANOAOD')\nNumSample.append('-23'); sampleN_short.append('MuonEGRun2016E')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016F-05Feb2018-v1/NANOAOD')\nNumSample.append('-24'); sampleN_short.append('MuonEGRun2016F')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016G-05Feb2018-v1/NANOAOD')\nNumSample.append('-25'); sampleN_short.append('MuonEGRun2016G')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016H-05Feb2018_ver2-v1/NANOAOD')\nNumSample.append('-26'); sampleN_short.append('MuonEGRun2016Hver2')\nMCxsections.append(-1.0)\ndatasets.append('/MuonEG/Run2016H-05Feb2018_ver3-v1/NANOAOD')\nNumSample.append('-27'); sampleN_short.append('MuonEGRun2016Hver3')\nMCxsections.append(-1.0)\n\n\nmasspoints = [260, 270, 300, 350, 400, 450, 500, 550, 600, 650, 750, 800, 900]\nfor mass in masspoints:\n datasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-%d_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\"%mass)\n NumSample.append(masspoints.index(mass)); sampleN_short.append('RadionM%d'%mass)\n MCxsections.append(5.0)#by default, assume the cross section for signal is 5pb\n#datasets.append(\"/GluGluToBulkGravitonToHHTo2B2VTo2L2Nu_M-*_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\n#NumSample.append('2'); sampleN_short.append('Graviton')\n\n# TT## FIXME, use official one later\n#datasets.append('/TTTo2L2Nu_13TeV-powheg/RunIISpring16MiniAODv2-PUSpring16_80X_mcRun2_asymptotic_2016_miniAODv2_v0_ext1-v1/MINIAODSIM')\ndatasets.append('/TTTo2L2Nu_TuneCUETP8M2_ttHtranche3_13TeV-powheg-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n#datasets.append('/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/arizzi-RunIIFall17MiniAOD-94X-Nano01Fall17-e273b12d9f89d622a34e4bc98b05ee29/USER')\nNumSample.append('13'); sampleN_short.append('TT')\n#MCxsections.append(72.1)\n#MCxsections.append(76.7)\nMCxsections.append(87.31)\n# DY\n#datasets.append('/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n\ndatasets.append('/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('14'); sampleN_short.append('DY')\nMCxsections.append(18610.0)\ndatasets.append('/DYToLL_0J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('15'); sampleN_short.append('DY')\nMCxsections.append(4758.9)\ndatasets.append('/DYToLL_1J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('16'); sampleN_short.append('DY')\nMCxsections.append(929.1)\ndatasets.append('/DYToLL_2J_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('17'); sampleN_short.append('DY')\nMCxsections.append(337.1)\n# 
VV\ndatasets.append('/ZZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('18'); sampleN_short.append('VV')\nMCxsections.append(3.22)\ndatasets.append('/ZZTo2L2Nu_13TeV_powheg_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('19'); sampleN_short.append('VV')\nMCxsections.append(0.564)\ndatasets.append('/ZZTo4L_13TeV_powheg_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('20'); sampleN_short.append('VV')\nMCxsections.append(1.256)\n#datasets.append('/WWToLNuQQ_aTGC_13TeV-madgraph-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n#NumSample.append('21'); sampleN_short.append('VV')\n#MCxsections.append(49.997)# ## not available now because of pdf uncertainty\n#FIXME\n#datasets.append('/WWTo2L2Nu_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n#datasets.append('/WWTo2L2Nu_13TeV-powheg/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n#NumSample.append('22'); sampleN_short.append('VV') ### not available now\n#MCxsections.append(12.178)\ndatasets.append('/WZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('23'); sampleN_short.append('VV')\nMCxsections.append(5.595)\n#FIXME\n#datasets.append('/WZTo1L3Nu_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\n#NumSample.append('24'); sampleN_short.append('VV') ### not available now \n#MCxsections.append(3.033)\ndatasets.append('/WZTo1L1Nu2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v3/MINIAODSIM')\nNumSample.append('25'); sampleN_short.append('VV')\nMCxsections.append(10.71)\ndatasets.append('/WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('26'); sampleN_short.append('VV')\nMCxsections.append(4.42965)\n##sT\ndatasets.append('/ST_t-channel_top_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('27'); sampleN_short.append('sT')\nMCxsections.append(136.02)\ndatasets.append('/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('28'); sampleN_short.append('sT')\nMCxsections.append(80.95)\ndatasets.append('/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('29'); sampleN_short.append('sT')\nMCxsections.append(3.36)\ndatasets.append('/ST_tW_antitop_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('30'); 
sampleN_short.append('sT')\nMCxsections.append(19.5545)\ndatasets.append('/ST_tW_top_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('31'); sampleN_short.append('sT')\nMCxsections.append(19.5545)\n# W + Jets\ndatasets.append('/WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('32'); sampleN_short.append('Wjet')\nMCxsections.append(61526.7)\ndatasets.append('/WJetsToLNu_HT-100To200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')\nNumSample.append('33'); sampleN_short.append('Wjet')\nMCxsections.append(1627.45)\n\ndatasets.append('/WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')\nNumSample.append('34'); sampleN_short.append('Wjet')\nMCxsections.append(435.237)\ndatasets.append('/WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('35'); sampleN_short.append('Wjet')\nMCxsections.append(59.181)\n#FIXME\n#datasets.append('/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\n#NumSample.append('36'); sampleN_short.append('Wjet')### not available now\nMCxsections.append(14.58)\ndatasets.append('/WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('37'); sampleN_short.append('Wjet')\nMCxsections.append(6.656)\ndatasets.append('/WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('38'); sampleN_short.append('Wjet')\nMCxsections.append(1.608)\ndatasets.append('/WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/MINIAODSIM')\nNumSample.append('39'); sampleN_short.append('Wjet')\nMCxsections.append(0.0389)\n# tt + V\ndatasets.append('/TTWJetsToQQ_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('40'); sampleN_short.append('ttV')\nMCxsections.append(0.4062)\ndatasets.append('/TTWJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/MINIAODSIM')\nNumSample.append('41'); sampleN_short.append('ttV')\nMCxsections.append(0.2043)\ndatasets.append('/TTZToQQ_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM')\nNumSample.append('42'); sampleN_short.append('ttV')\nMCxsections.append(0.5297)\ndatasets.append('/TTZToLLNuNu_M-10_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext3-v1/MINIAODSIM')\nNumSample.append('43'); sampleN_short.append('ttV')\nMCxsections.append(0.2529)\n\nalljobtypes = set(sampleN_short)\nfor job in alljobtypes:\n localdirs[job] = []\n\nfor ijob, job in enumerate(datasets):\n nsample 
= int(NumSample[ijob])\n jobtype = sampleN_short[ijob]\n dataname = \"\"\n datadir = \" \"\n #print \"nsample \",nsample, \" jobtype \",jobtype\n if nsample < 0:\n datadir = sampleN_short[ijob]\n\tdataname = job\n #print \"real data nsample \",nsample, \" datadir \",datadir\n elif nsample > 0:\n datadir = job.split('/')[1]\n #print \"MC nsample \",nsample, \" datadir \",datadir, \"MiniAOD dataset \",job.split('/')\n\t#query = \"dataset dataset=/%s/*/NANOAODSIM\"%(datadir)\n #pdata = os.popen(\"dasgoclient -limit=0 -query='{query}'\".format(query = query))\t\n #founddataset = False\n\t#for line in pdata:\n\t# #print \"dataset \",line,\" datatype \",datadir\n\t# if datadir in line:\n\t# founddataset = True\n\t# dataname = line[:-1]\t\n\t#if not(founddataset): \n\t# print \"WARNING!!!!! no dataset found for \",datadir\n localdirs[jobtype].append(os.path.join(fdatadir, datadir))\n\nNanodatasets.append(\"/DoubleEG/Run2016B-05Feb2018_ver1-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016B-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016C-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016D-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016E-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016F-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016G-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016H-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleEG/Run2016H-05Feb2018_ver3-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016B-05Feb2018_ver1-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016B-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016C-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016D-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016E-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016F-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016G-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016H-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/DoubleMuon/Run2016H-05Feb2018_ver3-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016B-05Feb2018_ver1-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016B-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016C-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016D-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016E-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016F-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016G-05Feb2018-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016H-05Feb2018_ver2-v1/NANOAOD\")\nNanodatasets.append(\"/MuonEG/Run2016H-05Feb2018_ver3-v1/NANOAOD\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-260_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-270_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-300_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-350_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-400_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_m
cRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-450_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-500_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-550_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-600_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-650_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-750_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-800_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/GluGluToRadionToHHTo2B2VTo2L2Nu_M-900_narrow_13TeV-madgraph-v2/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\n#TTbar\n#Nanodatasets.append(\"/TTTo2L2Nu_TuneCP5_13TeV-powheg-pythia8/arizzi-RunIIFall17MiniAOD-94X-Nano01Fall17-e273b12d9f89d622a34e4bc98b05ee29/USER\")\nNanodatasets.append('/TTTo2L2Nu_TuneCUETP8M2_ttHtranche3_13TeV-powheg-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM')\n\n# DY\nNanodatasets.append(\"/DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/DYToLL_0J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\nNanodatasets.append(\"/DYToLL_1J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\nNanodatasets.append(\"/DYToLL_2J_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\n# 
VV\nNanodatasets.append(\"/ZZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ZZTo2L2Nu_13TeV_powheg_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ZZTo4L_13TeV_powheg_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\n#Nanodatasets.append(\"/WWToLNuQQ_aTGC_13TeV-madgraph-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WZTo2L2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WZTo1L1Nu2Q_13TeV_amcatnloFXFX_madspin_pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\n#sT\nNanodatasets.append(\"/ST_t-channel_top_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ST_tW_antitop_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/ST_tW_top_5f_NoFullyHadronicDecays_13TeV-powheg_TuneCUETP8M1/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\n#W+jets\nNanodatasets.append(\"/WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-100To200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-400To600_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-800To1200_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/WJetsToLNu_HT-2500ToInf_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext1-v1/NANOAODSIM\")\n# tt + 
V\nNanodatasets.append(\"/TTWJetsToQQ_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/TTWJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-madspin-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext2-v1/NANOAODSIM\")\nNanodatasets.append(\"/TTZToQQ_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2-v1/NANOAODSIM\")\nNanodatasets.append(\"/TTZToLLNuNu_M-10_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16NanoAOD-PUMoriond17_05Feb2018_94X_mcRun2_asymptotic_v2_ext3-v1/NANOAODSIM\")\n\n\n\noutAnalist = {}\noutAnadir = \"/fdata/hepx/store/user/taohuang/HHNtuple_20180328_fixedleptonDZeff/\"\nfor i,datasetname in enumerate( Nanodatasets ):\n sampleName = sampleN_short[i]\n if NumSample[i] < 0:\n \tsampleName = \"Data\"\n outAnafile = os.path.join(outAnadir, Nanodatasets[i].split('/')[1])\n if hasattr(outAnalist, sampleName):\n\toutAnalist[sampleName].append(outAnafile)\n else:\n\toutAnalist[sampleName] = []\n\toutAnalist[sampleName].append(outAnafile)\n\ndataintxt = open(\"2016MCSamplelist.txt\",\"w+\")\nfor dataset in datasets:\n dataintxt.write(dataset+\"\\n\")\ndataintxt.close()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#!/usr/bin/env python
'''
State Machine for the Flare task
'''
import roslib
import rospy
import actionlib
from rospy.timer import sleep
import smach
import smach_ros
from dynamic_reconfigure.server import Server
import math
import os
import sys
import numpy as np
from bbauv_msgs.msg import *
from bbauv_msgs.srv import *
from flare_vision import Flare
#Global variables
isStart = False
isEnd = False
isTestMode = False #If test mode then don't wait for mission call
rosRate = None
flare = None
VisionLoopCount = 0 #Counter for number of times the image is being processed
flareSeen = False
mani_pub = None
movement_client = None
locomotionGoal = None
flare_params = {'flare_area':0, 'centering_x':0, 'centering_y':0}
#Starts off in disengage class
class Disengage(smach.State):
def __init__(self, flare_task):
smach.State.__init__(self, outcomes=['start_complete', 'complete_outcome', 'aborted'])
self.flare = flare_task
def execute(self, userdata):
# self.flare.unregister()
if self.flare.isKilled:
rospy.signal_shutdown("Bye")
return 'aborted'
while self.flare.isAborted:
rospy.sleep(rospy.Duration(0.2))
if self.flare.testing:
self.flare.register()
rospy.loginfo("Starting Flare")
return 'start_complete'
#Searches for the flare
class Search(smach.State):
    timeout = 10000 #iteration limit before aborting the search (each pass sleeps ~0.5s)
def __init__(self, flare_task):
smach.State.__init__(self, outcomes=['search_complete', 'aborted', 'mission_abort'])
self.flare = flare_task
if self.flare.testing:
self.flare.unregisterHeading()
#rospy.loginfo(self.flare.curHeading)
def execute(self, userdata):
#Check for abort signal
if self.flare.isAborted:
rospy.signal_shutdown("Bye!")
return 'aborted'
#Check if flare found or timeout already
timecount = 0
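        # creep forward until the flare is detected, or give up on timeout / kill signal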
while not self.flare.rectData['detected']:
if timecount > self.timeout or rospy.is_shutdown() or self.flare.isKilled:
self.flare.abortMission()
self.flare.failedTask();
return 'aborted'
self.flare.sendMovement(forward=1.0)
rospy.sleep(rospy.Duration(0.5))
timecount += 1
return 'search_complete'
#Bash towards the flare!
class Manuoevre(smach.State):
def __init__(self, flare_task):
smach.State.__init__(self, outcomes=['manuoevring', 'manuoevre_complete',
'aborted', 'mission_abort'])
self.flare = flare_task
self.deltaThresh = 0.15
self.prevAngle = []
self.count = 0
self.flareSeen = True
def execute(self,userdata):
#Check for aborted signal
if self.flare.isAborted:
rospy.signal_shutdown("Bye!")
return 'aborted'
# #Cannot detect already
# if not self.flare.rectData['detected']:
# self.count += 1
# if self.count > 4:
# self.flare.taskComplete()
# return 'manuoevre_complete'
# if not self.flare.rectData['detected'] and self.flareSeen:
# self.flare.sendMovement(forward=2.0)
# rospy.sleep(rospy.Duration(3))
# self.flare.taskComplete()
# return 'manuoevre_complete'
#Get to the flare
screenWidth = self.flare.screen['width']
screenCenterX = screenWidth / 2
deltaX = (self.flare.rectData['centroids'][0] - screenCenterX) / screenWidth
#rospy.loginfo("Delta X {}".format(deltaX))
rospy.loginfo("Area {}".format(self.flare.rectData['area']))
#Forward if center
rospy.loginfo("Delta X: {}".format(deltaX))
if abs(deltaX) < 0.15:
self.flare.sendMovement(forward=self.flare.forwardOffset)
rospy.sleep(rospy.Duration(0.5))
else:
#Sidemove if too far off center
sidemove = math.copysign(deltaX*self.flare.deltaXMultiplier, deltaX) #Random number
# sidemove = math.copysign(0.5, deltaX)
self.flare.sendMovement(forward=0.10, sidemove=sidemove)
rospy.sleep(rospy.Duration(0.5))
#Shoot straight and aim
if self.flare.rectData['area'] > self.flare.headOnArea:
return 'manuoevre_complete'
return 'manuoevring'
#return 'manuoevre_complete'
class Completing(smach.State):
def __init__(self, flare_task):
smach.State.__init__(self, outcomes=['complete_complete', 'completing',
'aborted', 'mission_abort'])
self.flare = flare_task
self.count = 0
def execute(self,userdata):
#Check for aborted signal
if self.flare.isAborted:
self.flare.isKilled = True
rospy.signal_shutdown("Bye!")
return 'aborted'
screenWidth = self.flare.screen['width']
screenCenterX = screenWidth / 2
deltaX = (self.flare.rectData['centroids'][0] - screenCenterX) / screenWidth
        deltaXMult = 2.0
rospy.loginfo("Delta X:{}".format(deltaX))
if abs(deltaX) < 0.03:
self.count += 1
rospy.loginfo("Count: {}".format(self.count))
return 'completing'
if self.count >= 2000:
self.flare.sendMovement(forward=4.0)
rospy.loginfo("Hitting the flare")
self.flare.locomotionClient.wait_for_result()
self.flare.sendMovement(forward=-2.0) #Retract
self.flare.locomotionClient.wait_for_result()
self.flare.taskComplete()
return 'complete_complete'
else:
self.count = 0
sidemove = math.copysign(deltaX*deltaXMult, deltaX) #Random number
self.flare.sendMovement(forward=0.00, sidemove=sidemove)
rospy.sleep(rospy.Duration(0.5))
return 'completing'
#self.flare.taskComplete()
#return 'complete_complete'
'''
Main python thread
'''
def handle_srv(req):
global isStart
global isAbort
global locomotionGoal
global flare
rospy.loginfo("Flare service handled")
if req.start_request:
rospy.loginfo("Flare is Start")
isStart = True
isAbort = False
#locomotionGoal = req.start_ctrl
if req.abort_reqest:
rospy.loginfo("Flare abort received")
isAbort = True
isStart = False
flare.unregister()
#To fill accordingly
return mission_to_visionResponse(isStart, isAbort)
#Param config callback
def flareCallback(config, level):
    global isTestMode
    for param in flare.yellow_params:
        flare.yellow_params[param] = config['yellow_' + param]
    isTestMode = config['testing']
    return config
#Utility function for normalising heading
def normHeading(heading):
if heading > 360:
return heading - 360
elif heading < 0:
return heading + 360
else:
return heading
if __name__ == '__main__':
rospy.init_node("Flare", anonymous=False)
rosRate = rospy.Rate(20)
flare_task = Flare()
rospy.loginfo("Flare loaded!")
#Create state machine container
sm = smach.StateMachine(outcomes=['complete_flare', 'aborted'])
#Disengage, Search, Manuoevre
with sm:
smach.StateMachine.add("DISENGAGE", Disengage(flare_task),
transitions={'start_complete': "SEARCH",
'complete_outcome': 'complete_flare',
'aborted': 'aborted'})
smach.StateMachine.add("SEARCH", Search(flare_task),
transitions={'search_complete': "MANUOEVRE", 'aborted': 'aborted',
'mission_abort': "DISENGAGE"})
smach.StateMachine.add("MANUOEVRE", Manuoevre(flare_task),
transitions = {'manuoevring': "MANUOEVRE",
'manuoevre_complete': "COMPLETING",
'aborted': 'aborted',
'mission_abort': "DISENGAGE"})
smach.StateMachine.add("COMPLETING", Completing(flare_task),
transitions = {'complete_complete': "DISENGAGE",
'completing': "COMPLETING",
'aborted': 'aborted',
'mission_abort': "DISENGAGE"})
sis = smach_ros.IntrospectionServer('flare_task', sm, '/SM_ROOT')
sis.start()
outcomes = sm.execute()
#wait for ctrl-c
rospy.spin()
sis.stop()
|
normal
|
{
"blob_id": "0bb2a6ebbf75fae3466c34a435a531fabdc07f62",
"index": 2984,
"step-1": "<mask token>\n\n\nclass Disengage(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['start_complete',\n 'complete_outcome', 'aborted'])\n self.flare = flare_task\n <mask token>\n\n\nclass Search(smach.State):\n timeout = 10000\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['search_complete', 'aborted',\n 'mission_abort'])\n self.flare = flare_task\n if self.flare.testing:\n self.flare.unregisterHeading()\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n timecount = 0\n while not self.flare.rectData['detected']:\n if timecount > self.timeout or rospy.is_shutdown(\n ) or self.flare.isKilled:\n self.flare.abortMission()\n self.flare.failedTask()\n return 'aborted'\n self.flare.sendMovement(forward=1.0)\n rospy.sleep(rospy.Duration(0.5))\n timecount += 1\n return 'search_complete'\n\n\nclass Manuoevre(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['manuoevring',\n 'manuoevre_complete', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.deltaThresh = 0.15\n self.prevAngle = []\n self.count = 0\n self.flareSeen = True\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n rospy.loginfo('Area {}'.format(self.flare.rectData['area']))\n rospy.loginfo('Delta X: {}'.format(deltaX))\n if abs(deltaX) < 0.15:\n self.flare.sendMovement(forward=self.flare.forwardOffset)\n rospy.sleep(rospy.Duration(0.5))\n else:\n sidemove = math.copysign(deltaX * self.flare.deltaXMultiplier,\n deltaX)\n self.flare.sendMovement(forward=0.1, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n if self.flare.rectData['area'] > self.flare.headOnArea:\n return 'manuoevre_complete'\n return 'manuoevring'\n\n\nclass Completing(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['complete_complete',\n 'completing', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.count = 0\n\n def execute(self, userdata):\n if self.flare.isAborted:\n self.flare.isKilled = True\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n deltaXMult = 2.0\n rospy.loginfo('Delta X:{}'.format(deltaX))\n if abs(deltaX) < 0.03:\n self.count += 1\n rospy.loginfo('Count: {}'.format(self.count))\n return 'completing'\n if self.count >= 2000:\n self.flare.sendMovement(forward=4.0)\n rospy.loginfo('Hitting the flare')\n self.flare.locomotionClient.wait_for_result()\n self.flare.sendMovement(forward=-2.0)\n self.flare.locomotionClient.wait_for_result()\n self.flare.taskComplete()\n return 'complete_complete'\n else:\n self.count = 0\n sidemove = math.copysign(deltaX * deltaXMult, deltaX)\n self.flare.sendMovement(forward=0.0, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n return 'completing'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Disengage(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['start_complete',\n 'complete_outcome', 'aborted'])\n self.flare = flare_task\n\n def execute(self, userdata):\n if self.flare.isKilled:\n rospy.signal_shutdown('Bye')\n return 'aborted'\n while self.flare.isAborted:\n rospy.sleep(rospy.Duration(0.2))\n if self.flare.testing:\n self.flare.register()\n rospy.loginfo('Starting Flare')\n return 'start_complete'\n\n\nclass Search(smach.State):\n timeout = 10000\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['search_complete', 'aborted',\n 'mission_abort'])\n self.flare = flare_task\n if self.flare.testing:\n self.flare.unregisterHeading()\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n timecount = 0\n while not self.flare.rectData['detected']:\n if timecount > self.timeout or rospy.is_shutdown(\n ) or self.flare.isKilled:\n self.flare.abortMission()\n self.flare.failedTask()\n return 'aborted'\n self.flare.sendMovement(forward=1.0)\n rospy.sleep(rospy.Duration(0.5))\n timecount += 1\n return 'search_complete'\n\n\nclass Manuoevre(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['manuoevring',\n 'manuoevre_complete', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.deltaThresh = 0.15\n self.prevAngle = []\n self.count = 0\n self.flareSeen = True\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n rospy.loginfo('Area {}'.format(self.flare.rectData['area']))\n rospy.loginfo('Delta X: {}'.format(deltaX))\n if abs(deltaX) < 0.15:\n self.flare.sendMovement(forward=self.flare.forwardOffset)\n rospy.sleep(rospy.Duration(0.5))\n else:\n sidemove = math.copysign(deltaX * self.flare.deltaXMultiplier,\n deltaX)\n self.flare.sendMovement(forward=0.1, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n if self.flare.rectData['area'] > self.flare.headOnArea:\n return 'manuoevre_complete'\n return 'manuoevring'\n\n\nclass Completing(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['complete_complete',\n 'completing', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.count = 0\n\n def execute(self, userdata):\n if self.flare.isAborted:\n self.flare.isKilled = True\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n deltaXMult = 2.0\n rospy.loginfo('Delta X:{}'.format(deltaX))\n if abs(deltaX) < 0.03:\n self.count += 1\n rospy.loginfo('Count: {}'.format(self.count))\n return 'completing'\n if self.count >= 2000:\n self.flare.sendMovement(forward=4.0)\n rospy.loginfo('Hitting the flare')\n self.flare.locomotionClient.wait_for_result()\n self.flare.sendMovement(forward=-2.0)\n self.flare.locomotionClient.wait_for_result()\n self.flare.taskComplete()\n return 'complete_complete'\n else:\n self.count = 0\n sidemove = math.copysign(deltaX * deltaXMult, deltaX)\n self.flare.sendMovement(forward=0.0, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n return 'completing'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Disengage(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['start_complete',\n 'complete_outcome', 'aborted'])\n self.flare = flare_task\n\n def execute(self, userdata):\n if self.flare.isKilled:\n rospy.signal_shutdown('Bye')\n return 'aborted'\n while self.flare.isAborted:\n rospy.sleep(rospy.Duration(0.2))\n if self.flare.testing:\n self.flare.register()\n rospy.loginfo('Starting Flare')\n return 'start_complete'\n\n\nclass Search(smach.State):\n timeout = 10000\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['search_complete', 'aborted',\n 'mission_abort'])\n self.flare = flare_task\n if self.flare.testing:\n self.flare.unregisterHeading()\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n timecount = 0\n while not self.flare.rectData['detected']:\n if timecount > self.timeout or rospy.is_shutdown(\n ) or self.flare.isKilled:\n self.flare.abortMission()\n self.flare.failedTask()\n return 'aborted'\n self.flare.sendMovement(forward=1.0)\n rospy.sleep(rospy.Duration(0.5))\n timecount += 1\n return 'search_complete'\n\n\nclass Manuoevre(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['manuoevring',\n 'manuoevre_complete', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.deltaThresh = 0.15\n self.prevAngle = []\n self.count = 0\n self.flareSeen = True\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n rospy.loginfo('Area {}'.format(self.flare.rectData['area']))\n rospy.loginfo('Delta X: {}'.format(deltaX))\n if abs(deltaX) < 0.15:\n self.flare.sendMovement(forward=self.flare.forwardOffset)\n rospy.sleep(rospy.Duration(0.5))\n else:\n sidemove = math.copysign(deltaX * self.flare.deltaXMultiplier,\n deltaX)\n self.flare.sendMovement(forward=0.1, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n if self.flare.rectData['area'] > self.flare.headOnArea:\n return 'manuoevre_complete'\n return 'manuoevring'\n\n\nclass Completing(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['complete_complete',\n 'completing', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.count = 0\n\n def execute(self, userdata):\n if self.flare.isAborted:\n self.flare.isKilled = True\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n deltaXMult = 2.0\n rospy.loginfo('Delta X:{}'.format(deltaX))\n if abs(deltaX) < 0.03:\n self.count += 1\n rospy.loginfo('Count: {}'.format(self.count))\n return 'completing'\n if self.count >= 2000:\n self.flare.sendMovement(forward=4.0)\n rospy.loginfo('Hitting the flare')\n self.flare.locomotionClient.wait_for_result()\n self.flare.sendMovement(forward=-2.0)\n self.flare.locomotionClient.wait_for_result()\n self.flare.taskComplete()\n return 'complete_complete'\n else:\n self.count = 0\n sidemove = math.copysign(deltaX * deltaXMult, deltaX)\n self.flare.sendMovement(forward=0.0, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n return 'completing'\n\n\n<mask token>\n\n\ndef flareCallback(conig, level):\n for param in 
flare.yellow_params:\n flare.yellow_params[param] = config['yellow_' + param]\n isTestMode = config['testing']\n return config\n\n\ndef normHeading(heading):\n if heading > 360:\n return heading - 360\n elif heading < 0:\n return heading + 360\n else:\n return heading\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Disengage(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['start_complete',\n 'complete_outcome', 'aborted'])\n self.flare = flare_task\n\n def execute(self, userdata):\n if self.flare.isKilled:\n rospy.signal_shutdown('Bye')\n return 'aborted'\n while self.flare.isAborted:\n rospy.sleep(rospy.Duration(0.2))\n if self.flare.testing:\n self.flare.register()\n rospy.loginfo('Starting Flare')\n return 'start_complete'\n\n\nclass Search(smach.State):\n timeout = 10000\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['search_complete', 'aborted',\n 'mission_abort'])\n self.flare = flare_task\n if self.flare.testing:\n self.flare.unregisterHeading()\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n timecount = 0\n while not self.flare.rectData['detected']:\n if timecount > self.timeout or rospy.is_shutdown(\n ) or self.flare.isKilled:\n self.flare.abortMission()\n self.flare.failedTask()\n return 'aborted'\n self.flare.sendMovement(forward=1.0)\n rospy.sleep(rospy.Duration(0.5))\n timecount += 1\n return 'search_complete'\n\n\nclass Manuoevre(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['manuoevring',\n 'manuoevre_complete', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.deltaThresh = 0.15\n self.prevAngle = []\n self.count = 0\n self.flareSeen = True\n\n def execute(self, userdata):\n if self.flare.isAborted:\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n rospy.loginfo('Area {}'.format(self.flare.rectData['area']))\n rospy.loginfo('Delta X: {}'.format(deltaX))\n if abs(deltaX) < 0.15:\n self.flare.sendMovement(forward=self.flare.forwardOffset)\n rospy.sleep(rospy.Duration(0.5))\n else:\n sidemove = math.copysign(deltaX * self.flare.deltaXMultiplier,\n deltaX)\n self.flare.sendMovement(forward=0.1, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n if self.flare.rectData['area'] > self.flare.headOnArea:\n return 'manuoevre_complete'\n return 'manuoevring'\n\n\nclass Completing(smach.State):\n\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['complete_complete',\n 'completing', 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.count = 0\n\n def execute(self, userdata):\n if self.flare.isAborted:\n self.flare.isKilled = True\n rospy.signal_shutdown('Bye!')\n return 'aborted'\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX\n ) / screenWidth\n deltaXMult = 2.0\n rospy.loginfo('Delta X:{}'.format(deltaX))\n if abs(deltaX) < 0.03:\n self.count += 1\n rospy.loginfo('Count: {}'.format(self.count))\n return 'completing'\n if self.count >= 2000:\n self.flare.sendMovement(forward=4.0)\n rospy.loginfo('Hitting the flare')\n self.flare.locomotionClient.wait_for_result()\n self.flare.sendMovement(forward=-2.0)\n self.flare.locomotionClient.wait_for_result()\n self.flare.taskComplete()\n return 'complete_complete'\n else:\n self.count = 0\n sidemove = math.copysign(deltaX * deltaXMult, deltaX)\n self.flare.sendMovement(forward=0.0, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n return 'completing'\n\n\n<mask token>\n\n\ndef handle_srv(req):\n global isStart\n global isAbort\n global 
locomotionGoal\n global flare\n rospy.loginfo('Flare service handled')\n if req.start_request:\n rospy.loginfo('Flare is Start')\n isStart = True\n isAbort = False\n if req.abort_reqest:\n rospy.loginfo('Flare abort received')\n isAbort = True\n isStart = False\n flare.unregister()\n return mission_to_visionResponse(isStart, isAbort)\n\n\ndef flareCallback(conig, level):\n for param in flare.yellow_params:\n flare.yellow_params[param] = config['yellow_' + param]\n isTestMode = config['testing']\n return config\n\n\ndef normHeading(heading):\n if heading > 360:\n return heading - 360\n elif heading < 0:\n return heading + 360\n else:\n return heading\n\n\n<mask token>\n",
"step-5": "#!/usr/bin/env python\n'''\nState Machine for the Flare task\n'''\n\nimport roslib\nimport rospy\nimport actionlib\nfrom rospy.timer import sleep\n\nimport smach\nimport smach_ros\n\nfrom dynamic_reconfigure.server import Server\n\nimport math\nimport os\nimport sys\n\n\nimport numpy as np\n\nfrom bbauv_msgs.msg import *\nfrom bbauv_msgs.srv import *\nfrom flare_vision import Flare\n\n#Global variables \nisStart = False\nisEnd = False\nisTestMode = False #If test mode then don't wait for mission call \nrosRate = None \nflare = None\nVisionLoopCount = 0 #Counter for number of times the image is being processed\nflareSeen = False\n\nmani_pub = None\nmovement_client = None\nlocomotionGoal = None\n\nflare_params = {'flare_area':0, 'centering_x':0, 'centering_y':0}\n\n\n#Starts off in disengage class\nclass Disengage(smach.State):\n \n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['start_complete', 'complete_outcome', 'aborted'])\n self.flare = flare_task\n \n def execute(self, userdata):\n# self.flare.unregister()\n\n if self.flare.isKilled:\n rospy.signal_shutdown(\"Bye\")\n return 'aborted'\n\n while self.flare.isAborted:\n rospy.sleep(rospy.Duration(0.2))\n \n if self.flare.testing:\n self.flare.register()\n rospy.loginfo(\"Starting Flare\")\n \n return 'start_complete'\n \n#Searches for the flare\nclass Search(smach.State):\n timeout = 10000 #5s timeout before aborting task\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['search_complete', 'aborted', 'mission_abort'])\n self.flare = flare_task\n \n if self.flare.testing:\n self.flare.unregisterHeading()\n #rospy.loginfo(self.flare.curHeading)\n \n def execute(self, userdata):\n #Check for abort signal\n if self.flare.isAborted:\n rospy.signal_shutdown(\"Bye!\")\n return 'aborted'\n \n #Check if flare found or timeout already\n timecount = 0\n while not self.flare.rectData['detected']:\n if timecount > self.timeout or rospy.is_shutdown() or self.flare.isKilled:\n self.flare.abortMission()\n self.flare.failedTask();\n return 'aborted'\n self.flare.sendMovement(forward=1.0)\n rospy.sleep(rospy.Duration(0.5))\n timecount += 1\n \n return 'search_complete'\n\n#Bash towards the flare!\nclass Manuoevre(smach.State):\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['manuoevring', 'manuoevre_complete',\n 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.deltaThresh = 0.15\n self.prevAngle = []\n self.count = 0\n self.flareSeen = True\n \n def execute(self,userdata):\n #Check for aborted signal\n if self.flare.isAborted:\n rospy.signal_shutdown(\"Bye!\")\n return 'aborted'\n \n# #Cannot detect already\n# if not self.flare.rectData['detected']:\n# self.count += 1\n# if self.count > 4:\n# self.flare.taskComplete()\n# return 'manuoevre_complete'\n \n# if not self.flare.rectData['detected'] and self.flareSeen:\n# self.flare.sendMovement(forward=2.0)\n# rospy.sleep(rospy.Duration(3))\n# self.flare.taskComplete()\n# return 'manuoevre_complete'\n \n #Get to the flare\n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX) / screenWidth\n #rospy.loginfo(\"Delta X {}\".format(deltaX))\n rospy.loginfo(\"Area {}\".format(self.flare.rectData['area']))\n \n #Forward if center\n rospy.loginfo(\"Delta X: {}\".format(deltaX))\n if abs(deltaX) < 0.15:\n self.flare.sendMovement(forward=self.flare.forwardOffset)\n rospy.sleep(rospy.Duration(0.5))\n else:\n #Sidemove if too far off 
center\n sidemove = math.copysign(deltaX*self.flare.deltaXMultiplier, deltaX) #Random number\n# sidemove = math.copysign(0.5, deltaX)\n self.flare.sendMovement(forward=0.10, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n \n #Shoot straight and aim\n if self.flare.rectData['area'] > self.flare.headOnArea:\n return 'manuoevre_complete'\n \n return 'manuoevring'\n\n #return 'manuoevre_complete'\n \nclass Completing(smach.State):\n def __init__(self, flare_task):\n smach.State.__init__(self, outcomes=['complete_complete', 'completing',\n 'aborted', 'mission_abort'])\n self.flare = flare_task\n self.count = 0\n \n def execute(self,userdata):\n #Check for aborted signal\n if self.flare.isAborted:\n self.flare.isKilled = True\n rospy.signal_shutdown(\"Bye!\")\n return 'aborted'\n \n screenWidth = self.flare.screen['width']\n screenCenterX = screenWidth / 2\n deltaX = (self.flare.rectData['centroids'][0] - screenCenterX) / screenWidth\n \n deltaXMult =2.0\n rospy.loginfo(\"Delta X:{}\".format(deltaX))\n \n if abs(deltaX) < 0.03:\n self.count += 1\n rospy.loginfo(\"Count: {}\".format(self.count))\n return 'completing'\n \n if self.count >= 2000:\n self.flare.sendMovement(forward=4.0)\n rospy.loginfo(\"Hitting the flare\")\n self.flare.locomotionClient.wait_for_result()\n self.flare.sendMovement(forward=-2.0) #Retract\n self.flare.locomotionClient.wait_for_result()\n self.flare.taskComplete()\n return 'complete_complete'\n \n else:\n self.count = 0\n sidemove = math.copysign(deltaX*deltaXMult, deltaX) #Random number\n self.flare.sendMovement(forward=0.00, sidemove=sidemove)\n rospy.sleep(rospy.Duration(0.5))\n return 'completing'\n\n #self.flare.taskComplete()\n #return 'complete_complete'\n\n'''\nMain python thread\n'''\n \ndef handle_srv(req):\n global isStart\n global isAbort\n global locomotionGoal\n global flare\n \n rospy.loginfo(\"Flare service handled\")\n \n if req.start_request:\n rospy.loginfo(\"Flare is Start\")\n isStart = True\n isAbort = False \n #locomotionGoal = req.start_ctrl\n if req.abort_reqest:\n rospy.loginfo(\"Flare abort received\")\n isAbort = True\n isStart = False\n flare.unregister()\n \n #To fill accordingly\n return mission_to_visionResponse(isStart, isAbort)\n \n#Param config callback\ndef flareCallback(conig, level):\n for param in flare.yellow_params:\n flare.yellow_params[param] = config['yellow_' + param]\n isTestMode = config[\"testing\"]\n return config\n\n#Utility function for normalising heading \ndef normHeading(heading):\n if heading > 360:\n return heading - 360\n elif heading < 0:\n return heading + 360\n else:\n return heading \n\nif __name__ == '__main__':\n rospy.init_node(\"Flare\", anonymous=False)\n rosRate = rospy.Rate(20)\n flare_task = Flare()\n rospy.loginfo(\"Flare loaded!\")\n \n #Create state machine container \n sm = smach.StateMachine(outcomes=['complete_flare', 'aborted'])\n \n #Disengage, Search, Manuoevre\n with sm:\n smach.StateMachine.add(\"DISENGAGE\", Disengage(flare_task),\n transitions={'start_complete': \"SEARCH\", \n 'complete_outcome': 'complete_flare', \n 'aborted': 'aborted'})\n \n smach.StateMachine.add(\"SEARCH\", Search(flare_task),\n transitions={'search_complete': \"MANUOEVRE\", 'aborted': 'aborted', \n 'mission_abort': \"DISENGAGE\"})\n \n smach.StateMachine.add(\"MANUOEVRE\", Manuoevre(flare_task),\n transitions = {'manuoevring': \"MANUOEVRE\",\n 'manuoevre_complete': \"COMPLETING\",\n 'aborted': 'aborted',\n 'mission_abort': \"DISENGAGE\"})\n \n smach.StateMachine.add(\"COMPLETING\", Completing(flare_task),\n 
transitions = {'complete_complete': \"DISENGAGE\",\n 'completing': \"COMPLETING\",\n 'aborted': 'aborted',\n 'mission_abort': \"DISENGAGE\"})\n \n sis = smach_ros.IntrospectionServer('flare_task', sm, '/SM_ROOT')\n sis.start()\n outcomes = sm.execute()\n \n #wait for ctrl-c\n rospy.spin()\n sis.stop()\n \n",
"step-ids": [
12,
13,
15,
16,
20
]
}
|
[
12,
13,
15,
16,
20
] |
"""
Given two strings A and B of lowercase letters, return true
if and only if we can swap two letters in A so that the result
equals B.
Example 1:
Input: A = "ab", B = "ba"
Output: true
"""
class Solution:
    def buddyStrings(self, A: str, B: str) -> bool:
        # Strings of different lengths can never match after a single swap.
        if len(A) != len(B):
            return False
        # Already equal: a swap only helps if some letter repeats,
        # because the two copies can be swapped with each other.
        if A == B and len(A) > len(set(A)):
            return True
        # Collect the characters at every position where A and B disagree.
        re1 = ""
        re2 = ""
        for i in range(len(A)):
            if A[i] != B[i]:
                re1 += A[i]
                re2 += B[i]
        # Exactly two mismatches that mirror each other are fixable by one swap.
        if len(re1) == len(re2) == 2 and re1 == re2[::-1]:
            return True
        return False
|
normal
|
{
"blob_id": "dd902f99ee8dc23f56641b8e75544a2d4576c19a",
"index": 4437,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution:\n\n def buddyStrings(self, A: str, B: str) ->bool:\n if len(A) != len(B):\n return False\n if A == B and len(A) > len(set(A)):\n return True\n re1 = ''\n re2 = ''\n for i in range(len(A)):\n if A[i] != B[i]:\n re1 += A[i]\n re2 += B[i]\n if len(re1) == len(re2) == 2 and re1 == re2[::-1]:\n return True\n return False\n",
"step-4": "\"\"\"\nGiven two strings A and B of lowercase letters, return true \nif and only if we can swap two letters in A so that the result \nequals B.\n\n Example 1:\n\n Input: A = \"ab\", B = \"ba\"\n Output: true\n\"\"\"\n\nclass Solution:\n def buddyStrings(self, A: str, B: str) -> bool:\n if len(A) != len(B):\n return False\n \n if A == B and len(A) > len(set(A)):\n return True\n \n re1 = \"\"\n re2 = \"\"\n for i in range(len(A)):\n if A[i] != B[i]:\n re1 += A[i]\n re2 += B[i] \n \n if len(re1) == len(re2) == 2 and re1 == re2[::-1]: \n return True\n \n return False\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
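A few illustrative calls to the class above (the inputs and expected outputs are my own sanity checks, not part of the original record):

checker = Solution()
print(checker.buddyStrings("ab", "ba"))  # True  - swapping the two letters of A gives B
print(checker.buddyStrings("ab", "ab"))  # False - equal, but no letter repeats
print(checker.buddyStrings("aa", "aa"))  # True  - the duplicate 'a' can be swapped with itself
print(checker.buddyStrings("ab", "cd"))  # False - two mismatches that are not mirror images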
def func(i):
    # True only if i is even, its decimal digits are all distinct and non-zero,
    # and every digit divides i itself.
    if i % 2 != 0:
        return False
    visited = [0] * 10
    temp = i
    while i:
        x = i % 10
        if visited[x] == 1 or x == 0:
            break
        visited[x] = 1
        i = i // 10
    if i == 0:  # the digit scan finished: all digits were distinct and non-zero
        for y in str(temp):
            if temp % int(y) != 0:
                return False
    else:
        return False
    return True

n, m = map(int, input().split())
print(sum(1 for i in range(n, m) if func(i)))
|
normal
|
{
"blob_id": "1a8c9be389aad37a36630a962c20a0a36c449bdd",
"index": 3809,
"step-1": "<mask token>\n",
"step-2": "def func(i):\n if i % 2 != 0:\n return False\n visited = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n temp = i\n while i:\n x = i % 10\n if visited[x] == 1 or x == 0:\n break\n visited[x] = 1\n i = int(i / 10)\n if i == 0:\n for y in str(temp):\n if temp % int(y) != 0:\n return False\n else:\n return False\n return True\n\n\n<mask token>\n",
"step-3": "def func(i):\n if i % 2 != 0:\n return False\n visited = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n temp = i\n while i:\n x = i % 10\n if visited[x] == 1 or x == 0:\n break\n visited[x] = 1\n i = int(i / 10)\n if i == 0:\n for y in str(temp):\n if temp % int(y) != 0:\n return False\n else:\n return False\n return True\n\n\n<mask token>\nprint(sum([(1) for i in range(n, m) if func(i)]))\n",
"step-4": "def func(i):\n if i % 2 != 0:\n return False\n visited = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n temp = i\n while i:\n x = i % 10\n if visited[x] == 1 or x == 0:\n break\n visited[x] = 1\n i = int(i / 10)\n if i == 0:\n for y in str(temp):\n if temp % int(y) != 0:\n return False\n else:\n return False\n return True\n\n\nn, m = map(int, input().split())\nprint(sum([(1) for i in range(n, m) if func(i)]))\n",
"step-5": "def func(i):\r\n if(i % 2 != 0): return False\r\n visited = [0,0,0,0,0,0,0,0,0,0]\r\n temp = i\r\n while(i):\r\n x = i%10\r\n if (visited[x] == 1) or (x == 0): break\r\n visited[x] = 1; \r\n i = (int)(i / 10); \r\n\r\n if(i == 0):\r\n for y in str(temp):\r\n if(temp % int(y) != 0): return False\r\n\r\n else: return False\r\n return True\r\n\r\nn,m = map(int, input().split())\r\n\r\nprint(sum([1 for i in range(n,m) if func(i)]))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
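A few hand-checked values for the predicate above (illustrative assumptions, not part of the original record):

print(func(24))  # True  - even; digits 2 and 4 are distinct, non-zero, and both divide 24
print(func(13))  # False - odd
print(func(22))  # False - repeated digit
print(func(20))  # False - contains a zero digit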
from django.conf.urls import patterns, url
urlpatterns = patterns(
'',
url(
r'^create_new/$',
'hx_lti_assignment.views.create_new_assignment',
name="create_new_assignment",
),
url(
r'^(?P<id>[0-9]+)/edit/',
'hx_lti_assignment.views.edit_assignment',
name="edit_assignment",
),
url(
r'^(?P<id>[0-9]+)/delete/',
'hx_lti_assignment.views.delete_assignment',
name="delete_assignment",
),
url(
r'^import_assignment/$',
'hx_lti_assignment.views.import_assignment',
name="import_assignment",
),
url(
r'^(?P<course_id>[0-9]+)/get_assignments',
'hx_lti_assignment.views.assignments_from_course',
name="assignments_from_course",
),
url(
r'^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import',
'hx_lti_assignment.views.moving_assignment',
name="moving_assignment",
),
)
|
normal
|
{
"blob_id": "2194fb4f0b0618f1c8db39f659a4890457f45b1d",
"index": 3963,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = patterns('', url('^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment', name=\n 'create_new_assignment'), url('^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment', name='edit_assignment'), url\n ('^(?P<id>[0-9]+)/delete/', 'hx_lti_assignment.views.delete_assignment',\n name='delete_assignment'), url('^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment', name='import_assignment'),\n url('^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course', name=\n 'assignments_from_course'), url(\n '^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import'\n , 'hx_lti_assignment.views.moving_assignment', name='moving_assignment'))\n",
"step-3": "from django.conf.urls import patterns, url\nurlpatterns = patterns('', url('^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment', name=\n 'create_new_assignment'), url('^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment', name='edit_assignment'), url\n ('^(?P<id>[0-9]+)/delete/', 'hx_lti_assignment.views.delete_assignment',\n name='delete_assignment'), url('^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment', name='import_assignment'),\n url('^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course', name=\n 'assignments_from_course'), url(\n '^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import'\n , 'hx_lti_assignment.views.moving_assignment', name='moving_assignment'))\n",
"step-4": "from django.conf.urls import patterns, url\n\nurlpatterns = patterns(\n '',\n url(\n r'^create_new/$',\n 'hx_lti_assignment.views.create_new_assignment',\n name=\"create_new_assignment\",\n ),\n url(\n r'^(?P<id>[0-9]+)/edit/',\n 'hx_lti_assignment.views.edit_assignment',\n name=\"edit_assignment\",\n ),\n url(\n r'^(?P<id>[0-9]+)/delete/',\n 'hx_lti_assignment.views.delete_assignment',\n name=\"delete_assignment\",\n ),\n url(\n r'^import_assignment/$',\n 'hx_lti_assignment.views.import_assignment',\n name=\"import_assignment\",\n ),\n url(\n r'^(?P<course_id>[0-9]+)/get_assignments',\n 'hx_lti_assignment.views.assignments_from_course',\n name=\"assignments_from_course\",\n ),\n url(\n r'^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import',\n 'hx_lti_assignment.views.moving_assignment',\n name=\"moving_assignment\",\n ),\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
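The patterns() helper and string-based view references used above only work on old Django (both were removed in 1.10). A minimal sketch of the same routes for current Django versions, assuming the view functions still live in hx_lti_assignment.views:

from django.urls import re_path
from hx_lti_assignment import views

urlpatterns = [
    re_path(r'^create_new/$', views.create_new_assignment, name='create_new_assignment'),
    re_path(r'^(?P<id>[0-9]+)/edit/', views.edit_assignment, name='edit_assignment'),
    re_path(r'^(?P<id>[0-9]+)/delete/', views.delete_assignment, name='delete_assignment'),
    re_path(r'^import_assignment/$', views.import_assignment, name='import_assignment'),
    re_path(r'^(?P<course_id>[0-9]+)/get_assignments', views.assignments_from_course, name='assignments_from_course'),
    re_path(r'^(?P<old_course_id>[0-9]+)/(?P<new_course_id>[0-9]+)/(?P<assignment_id>[0-9]+)/import', views.moving_assignment, name='moving_assignment'),
]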
'''
LeetCode 338. Counting Bits
Given a non-negative integer num, for each i in the range 0 <= i <= num,
compute the number of 1 bits in the binary representation of i and return
the counts as an array.
'''
class Solution(object):
    def countBits(self, n):
        """
        :type n: int
        :rtype: List[int]
        """
        out = [0] * (n + 1)
        for i in range(1, n + 1):
            if i % 2 == 1:
                # odd: one more set bit than the previous (even) number
                out[i] = out[i - 1] + 1
            else:
                # even: same set bits as i >> 1 (the trailing zero adds none)
                out[i] = out[i >> 1]
        return out
|
normal
|
{
"blob_id": "4cd1e385d18086b1045b1149d5f4573eaf9270c3",
"index": 6223,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution(object):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Solution(object):\n\n def countBits(self, n):\n \"\"\"\n :type n: int\n :rtype: List[int]\n \"\"\"\n out = [0] * (n + 1)\n for i in range(1, n + 1, 1):\n if i % 2 == 1:\n out[i] = out[i - 1] + 1\n else:\n out[i] = out[i >> 1]\n return out\n",
"step-4": "'''\r\nleetcode 338. 比特位计数\r\n给定一个非负整数 num。对于 0 ≤ i ≤ num 范围中的每个数字 i ,计算其二进制数中的 1 的数目并将它们作为数组返回。\r\n'''\r\nclass Solution(object):\r\n def countBits(self, n):\r\n \"\"\"\r\n :type n: int\r\n :rtype: List[int]\r\n \"\"\"\r\n out = [0] * (n+1)\r\n for i in range(1,n+1,1):\r\n if i%2==1: out[i]=out[i-1]+1\r\n else:\r\n out[i]=out[i>>1]\r\n \r\n return out",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
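A quick check of the recurrence above, with an assumed example call (not part of the original record):

print(Solution().countBits(5))  # [0, 1, 1, 2, 1, 2]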
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2016-03-15 16:39:32
# @Author : Your Name ([email protected])
# @Link : http://example.org
# @Version : $Id$
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from widgets.favorits.favorit_win import Ui_DialogFavorit
import json
import re
from widgets.input_link import def_url
#from favorit_win import Ui_DialogFavorit
class Favorits(QDialog, Ui_DialogFavorit):
"""docstring for Favorits"""
def __init__(self):
super(Favorits, self).__init__()
self.setupUi(self)
self.buttonBox.button(QDialogButtonBox.Save).setText("Сохранить")
self.buttonBox.button(QDialogButtonBox.Cancel).setText("Отмена")
self.path = 'setting.json'
self.setStyleSheet(open('static/style.qss').read())
self.list_fav()
self.plus_pb.setIcon(QIcon(":/icons/icons/plus.png"))
self.minus_pb.setIcon(QIcon(":/icons/icons/minus.png"))
self.plus_pb.clicked.connect(self.addfav)
self.minus_pb.clicked.connect(self.delfav)
def list_fav(self):
try:
self.data = json.load(open(self.path))
for i in self.data['favorit']:
self.favlist_listWidget.addItem(i)
except FileNotFoundError:
print("File with setting not found")
except KeyError:
self.data['favorit'] = []
json.dump(self.data, open(self.path, 'w'))
self.list_fav()
def addfav(self):
name = def_url.Input_stream()
if name.exec_():
link = name.url_stream_le.text()
reg = "http[s]?://"
if re.match(reg, link) is not None:
self.data['favorit'].append(link)
json.dump(self.data, open(self.path, 'w'))
self.favlist_listWidget.clear()
self.list_fav()
def delfav(self):
buf = self.favlist_listWidget.currentItem().text()
self.data['favorit'].remove(buf)
json.dump(self.data, open(self.path, 'w'))
self.favlist_listWidget.clear()
self.list_fav()
if __name__ == '__main__':
app = QApplication([])
w = Favorits()
w.show()
app.exec_()
|
normal
|
{
"blob_id": "14023785983f493af57189b3d96254efef2e33ae",
"index": 8180,
"step-1": "<mask token>\n\n\nclass Favorits(QDialog, Ui_DialogFavorit):\n <mask token>\n\n def __init__(self):\n super(Favorits, self).__init__()\n self.setupUi(self)\n self.buttonBox.button(QDialogButtonBox.Save).setText('Сохранить')\n self.buttonBox.button(QDialogButtonBox.Cancel).setText('Отмена')\n self.path = 'setting.json'\n self.setStyleSheet(open('static/style.qss').read())\n self.list_fav()\n self.plus_pb.setIcon(QIcon(':/icons/icons/plus.png'))\n self.minus_pb.setIcon(QIcon(':/icons/icons/minus.png'))\n self.plus_pb.clicked.connect(self.addfav)\n self.minus_pb.clicked.connect(self.delfav)\n\n def list_fav(self):\n try:\n self.data = json.load(open(self.path))\n for i in self.data['favorit']:\n self.favlist_listWidget.addItem(i)\n except FileNotFoundError:\n print('File with setting not found')\n except KeyError:\n self.data['favorit'] = []\n json.dump(self.data, open(self.path, 'w'))\n self.list_fav()\n <mask token>\n\n def delfav(self):\n buf = self.favlist_listWidget.currentItem().text()\n self.data['favorit'].remove(buf)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Favorits(QDialog, Ui_DialogFavorit):\n <mask token>\n\n def __init__(self):\n super(Favorits, self).__init__()\n self.setupUi(self)\n self.buttonBox.button(QDialogButtonBox.Save).setText('Сохранить')\n self.buttonBox.button(QDialogButtonBox.Cancel).setText('Отмена')\n self.path = 'setting.json'\n self.setStyleSheet(open('static/style.qss').read())\n self.list_fav()\n self.plus_pb.setIcon(QIcon(':/icons/icons/plus.png'))\n self.minus_pb.setIcon(QIcon(':/icons/icons/minus.png'))\n self.plus_pb.clicked.connect(self.addfav)\n self.minus_pb.clicked.connect(self.delfav)\n\n def list_fav(self):\n try:\n self.data = json.load(open(self.path))\n for i in self.data['favorit']:\n self.favlist_listWidget.addItem(i)\n except FileNotFoundError:\n print('File with setting not found')\n except KeyError:\n self.data['favorit'] = []\n json.dump(self.data, open(self.path, 'w'))\n self.list_fav()\n\n def addfav(self):\n name = def_url.Input_stream()\n if name.exec_():\n link = name.url_stream_le.text()\n reg = 'http[s]?://'\n if re.match(reg, link) is not None:\n self.data['favorit'].append(link)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n def delfav(self):\n buf = self.favlist_listWidget.currentItem().text()\n self.data['favorit'].remove(buf)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Favorits(QDialog, Ui_DialogFavorit):\n \"\"\"docstring for Favorits\"\"\"\n\n def __init__(self):\n super(Favorits, self).__init__()\n self.setupUi(self)\n self.buttonBox.button(QDialogButtonBox.Save).setText('Сохранить')\n self.buttonBox.button(QDialogButtonBox.Cancel).setText('Отмена')\n self.path = 'setting.json'\n self.setStyleSheet(open('static/style.qss').read())\n self.list_fav()\n self.plus_pb.setIcon(QIcon(':/icons/icons/plus.png'))\n self.minus_pb.setIcon(QIcon(':/icons/icons/minus.png'))\n self.plus_pb.clicked.connect(self.addfav)\n self.minus_pb.clicked.connect(self.delfav)\n\n def list_fav(self):\n try:\n self.data = json.load(open(self.path))\n for i in self.data['favorit']:\n self.favlist_listWidget.addItem(i)\n except FileNotFoundError:\n print('File with setting not found')\n except KeyError:\n self.data['favorit'] = []\n json.dump(self.data, open(self.path, 'w'))\n self.list_fav()\n\n def addfav(self):\n name = def_url.Input_stream()\n if name.exec_():\n link = name.url_stream_le.text()\n reg = 'http[s]?://'\n if re.match(reg, link) is not None:\n self.data['favorit'].append(link)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n def delfav(self):\n buf = self.favlist_listWidget.currentItem().text()\n self.data['favorit'].remove(buf)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n\nif __name__ == '__main__':\n app = QApplication([])\n w = Favorits()\n w.show()\n app.exec_()\n",
"step-4": "from PyQt5.QtWidgets import *\nfrom PyQt5.QtCore import *\nfrom PyQt5.QtGui import *\nfrom widgets.favorits.favorit_win import Ui_DialogFavorit\nimport json\nimport re\nfrom widgets.input_link import def_url\n\n\nclass Favorits(QDialog, Ui_DialogFavorit):\n \"\"\"docstring for Favorits\"\"\"\n\n def __init__(self):\n super(Favorits, self).__init__()\n self.setupUi(self)\n self.buttonBox.button(QDialogButtonBox.Save).setText('Сохранить')\n self.buttonBox.button(QDialogButtonBox.Cancel).setText('Отмена')\n self.path = 'setting.json'\n self.setStyleSheet(open('static/style.qss').read())\n self.list_fav()\n self.plus_pb.setIcon(QIcon(':/icons/icons/plus.png'))\n self.minus_pb.setIcon(QIcon(':/icons/icons/minus.png'))\n self.plus_pb.clicked.connect(self.addfav)\n self.minus_pb.clicked.connect(self.delfav)\n\n def list_fav(self):\n try:\n self.data = json.load(open(self.path))\n for i in self.data['favorit']:\n self.favlist_listWidget.addItem(i)\n except FileNotFoundError:\n print('File with setting not found')\n except KeyError:\n self.data['favorit'] = []\n json.dump(self.data, open(self.path, 'w'))\n self.list_fav()\n\n def addfav(self):\n name = def_url.Input_stream()\n if name.exec_():\n link = name.url_stream_le.text()\n reg = 'http[s]?://'\n if re.match(reg, link) is not None:\n self.data['favorit'].append(link)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n def delfav(self):\n buf = self.favlist_listWidget.currentItem().text()\n self.data['favorit'].remove(buf)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n\nif __name__ == '__main__':\n app = QApplication([])\n w = Favorits()\n w.show()\n app.exec_()\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# @Date : 2016-03-15 16:39:32\n# @Author : Your Name ([email protected])\n# @Link : http://example.org\n# @Version : $Id$\n\nfrom PyQt5.QtWidgets import *\nfrom PyQt5.QtCore import *\nfrom PyQt5.QtGui import *\nfrom widgets.favorits.favorit_win import Ui_DialogFavorit\nimport json\nimport re\nfrom widgets.input_link import def_url\n#from favorit_win import Ui_DialogFavorit\n\n\nclass Favorits(QDialog, Ui_DialogFavorit):\n \"\"\"docstring for Favorits\"\"\"\n\n def __init__(self):\n super(Favorits, self).__init__()\n self.setupUi(self)\n self.buttonBox.button(QDialogButtonBox.Save).setText(\"Сохранить\")\n self.buttonBox.button(QDialogButtonBox.Cancel).setText(\"Отмена\")\n self.path = 'setting.json'\n self.setStyleSheet(open('static/style.qss').read())\n self.list_fav()\n self.plus_pb.setIcon(QIcon(\":/icons/icons/plus.png\"))\n self.minus_pb.setIcon(QIcon(\":/icons/icons/minus.png\"))\n self.plus_pb.clicked.connect(self.addfav)\n self.minus_pb.clicked.connect(self.delfav)\n\n def list_fav(self):\n try:\n self.data = json.load(open(self.path))\n for i in self.data['favorit']:\n self.favlist_listWidget.addItem(i)\n except FileNotFoundError:\n print(\"File with setting not found\")\n except KeyError:\n self.data['favorit'] = []\n json.dump(self.data, open(self.path, 'w'))\n self.list_fav()\n\n def addfav(self):\n name = def_url.Input_stream()\n if name.exec_():\n link = name.url_stream_le.text()\n reg = \"http[s]?://\"\n if re.match(reg, link) is not None:\n self.data['favorit'].append(link)\n json.dump(self.data, open(self.path, 'w'))\n\n self.favlist_listWidget.clear()\n self.list_fav()\n\n def delfav(self):\n buf = self.favlist_listWidget.currentItem().text()\n self.data['favorit'].remove(buf)\n json.dump(self.data, open(self.path, 'w'))\n self.favlist_listWidget.clear()\n self.list_fav()\n\n\nif __name__ == '__main__':\n app = QApplication([])\n w = Favorits()\n w.show()\n app.exec_()\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |